agno 2.2.13__py3-none-any.whl → 2.4.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (383)
  1. agno/agent/__init__.py +6 -0
  2. agno/agent/agent.py +5252 -3145
  3. agno/agent/remote.py +525 -0
  4. agno/api/api.py +2 -0
  5. agno/client/__init__.py +3 -0
  6. agno/client/a2a/__init__.py +10 -0
  7. agno/client/a2a/client.py +554 -0
  8. agno/client/a2a/schemas.py +112 -0
  9. agno/client/a2a/utils.py +369 -0
  10. agno/client/os.py +2669 -0
  11. agno/compression/__init__.py +3 -0
  12. agno/compression/manager.py +247 -0
  13. agno/culture/manager.py +2 -2
  14. agno/db/base.py +927 -6
  15. agno/db/dynamo/dynamo.py +788 -2
  16. agno/db/dynamo/schemas.py +128 -0
  17. agno/db/dynamo/utils.py +26 -3
  18. agno/db/firestore/firestore.py +674 -50
  19. agno/db/firestore/schemas.py +41 -0
  20. agno/db/firestore/utils.py +25 -10
  21. agno/db/gcs_json/gcs_json_db.py +506 -3
  22. agno/db/gcs_json/utils.py +14 -2
  23. agno/db/in_memory/in_memory_db.py +203 -4
  24. agno/db/in_memory/utils.py +14 -2
  25. agno/db/json/json_db.py +498 -2
  26. agno/db/json/utils.py +14 -2
  27. agno/db/migrations/manager.py +199 -0
  28. agno/db/migrations/utils.py +19 -0
  29. agno/db/migrations/v1_to_v2.py +54 -16
  30. agno/db/migrations/versions/__init__.py +0 -0
  31. agno/db/migrations/versions/v2_3_0.py +977 -0
  32. agno/db/mongo/async_mongo.py +1013 -39
  33. agno/db/mongo/mongo.py +684 -4
  34. agno/db/mongo/schemas.py +48 -0
  35. agno/db/mongo/utils.py +17 -0
  36. agno/db/mysql/__init__.py +2 -1
  37. agno/db/mysql/async_mysql.py +2958 -0
  38. agno/db/mysql/mysql.py +722 -53
  39. agno/db/mysql/schemas.py +77 -11
  40. agno/db/mysql/utils.py +151 -8
  41. agno/db/postgres/async_postgres.py +1254 -137
  42. agno/db/postgres/postgres.py +2316 -93
  43. agno/db/postgres/schemas.py +153 -21
  44. agno/db/postgres/utils.py +22 -7
  45. agno/db/redis/redis.py +531 -3
  46. agno/db/redis/schemas.py +36 -0
  47. agno/db/redis/utils.py +31 -15
  48. agno/db/schemas/evals.py +1 -0
  49. agno/db/schemas/memory.py +20 -9
  50. agno/db/singlestore/schemas.py +70 -1
  51. agno/db/singlestore/singlestore.py +737 -74
  52. agno/db/singlestore/utils.py +13 -3
  53. agno/db/sqlite/async_sqlite.py +1069 -89
  54. agno/db/sqlite/schemas.py +133 -1
  55. agno/db/sqlite/sqlite.py +2203 -165
  56. agno/db/sqlite/utils.py +21 -11
  57. agno/db/surrealdb/models.py +25 -0
  58. agno/db/surrealdb/surrealdb.py +603 -1
  59. agno/db/utils.py +60 -0
  60. agno/eval/__init__.py +26 -3
  61. agno/eval/accuracy.py +25 -12
  62. agno/eval/agent_as_judge.py +871 -0
  63. agno/eval/base.py +29 -0
  64. agno/eval/performance.py +10 -4
  65. agno/eval/reliability.py +22 -13
  66. agno/eval/utils.py +2 -1
  67. agno/exceptions.py +42 -0
  68. agno/hooks/__init__.py +3 -0
  69. agno/hooks/decorator.py +164 -0
  70. agno/integrations/discord/client.py +13 -2
  71. agno/knowledge/__init__.py +4 -0
  72. agno/knowledge/chunking/code.py +90 -0
  73. agno/knowledge/chunking/document.py +65 -4
  74. agno/knowledge/chunking/fixed.py +4 -1
  75. agno/knowledge/chunking/markdown.py +102 -11
  76. agno/knowledge/chunking/recursive.py +2 -2
  77. agno/knowledge/chunking/semantic.py +130 -48
  78. agno/knowledge/chunking/strategy.py +18 -0
  79. agno/knowledge/embedder/azure_openai.py +0 -1
  80. agno/knowledge/embedder/google.py +1 -1
  81. agno/knowledge/embedder/mistral.py +1 -1
  82. agno/knowledge/embedder/nebius.py +1 -1
  83. agno/knowledge/embedder/openai.py +16 -12
  84. agno/knowledge/filesystem.py +412 -0
  85. agno/knowledge/knowledge.py +4261 -1199
  86. agno/knowledge/protocol.py +134 -0
  87. agno/knowledge/reader/arxiv_reader.py +3 -2
  88. agno/knowledge/reader/base.py +9 -7
  89. agno/knowledge/reader/csv_reader.py +91 -42
  90. agno/knowledge/reader/docx_reader.py +9 -10
  91. agno/knowledge/reader/excel_reader.py +225 -0
  92. agno/knowledge/reader/field_labeled_csv_reader.py +38 -48
  93. agno/knowledge/reader/firecrawl_reader.py +3 -2
  94. agno/knowledge/reader/json_reader.py +16 -22
  95. agno/knowledge/reader/markdown_reader.py +15 -14
  96. agno/knowledge/reader/pdf_reader.py +33 -28
  97. agno/knowledge/reader/pptx_reader.py +9 -10
  98. agno/knowledge/reader/reader_factory.py +135 -1
  99. agno/knowledge/reader/s3_reader.py +8 -16
  100. agno/knowledge/reader/tavily_reader.py +3 -3
  101. agno/knowledge/reader/text_reader.py +15 -14
  102. agno/knowledge/reader/utils/__init__.py +17 -0
  103. agno/knowledge/reader/utils/spreadsheet.py +114 -0
  104. agno/knowledge/reader/web_search_reader.py +8 -65
  105. agno/knowledge/reader/website_reader.py +16 -13
  106. agno/knowledge/reader/wikipedia_reader.py +36 -3
  107. agno/knowledge/reader/youtube_reader.py +3 -2
  108. agno/knowledge/remote_content/__init__.py +33 -0
  109. agno/knowledge/remote_content/config.py +266 -0
  110. agno/knowledge/remote_content/remote_content.py +105 -17
  111. agno/knowledge/utils.py +76 -22
  112. agno/learn/__init__.py +71 -0
  113. agno/learn/config.py +463 -0
  114. agno/learn/curate.py +185 -0
  115. agno/learn/machine.py +725 -0
  116. agno/learn/schemas.py +1114 -0
  117. agno/learn/stores/__init__.py +38 -0
  118. agno/learn/stores/decision_log.py +1156 -0
  119. agno/learn/stores/entity_memory.py +3275 -0
  120. agno/learn/stores/learned_knowledge.py +1583 -0
  121. agno/learn/stores/protocol.py +117 -0
  122. agno/learn/stores/session_context.py +1217 -0
  123. agno/learn/stores/user_memory.py +1495 -0
  124. agno/learn/stores/user_profile.py +1220 -0
  125. agno/learn/utils.py +209 -0
  126. agno/media.py +22 -6
  127. agno/memory/__init__.py +14 -1
  128. agno/memory/manager.py +223 -8
  129. agno/memory/strategies/__init__.py +15 -0
  130. agno/memory/strategies/base.py +66 -0
  131. agno/memory/strategies/summarize.py +196 -0
  132. agno/memory/strategies/types.py +37 -0
  133. agno/models/aimlapi/aimlapi.py +17 -0
  134. agno/models/anthropic/claude.py +434 -59
  135. agno/models/aws/bedrock.py +121 -20
  136. agno/models/aws/claude.py +131 -274
  137. agno/models/azure/ai_foundry.py +10 -6
  138. agno/models/azure/openai_chat.py +33 -10
  139. agno/models/base.py +1162 -561
  140. agno/models/cerebras/cerebras.py +120 -24
  141. agno/models/cerebras/cerebras_openai.py +21 -2
  142. agno/models/cohere/chat.py +65 -6
  143. agno/models/cometapi/cometapi.py +18 -1
  144. agno/models/dashscope/dashscope.py +2 -3
  145. agno/models/deepinfra/deepinfra.py +18 -1
  146. agno/models/deepseek/deepseek.py +69 -3
  147. agno/models/fireworks/fireworks.py +18 -1
  148. agno/models/google/gemini.py +959 -89
  149. agno/models/google/utils.py +22 -0
  150. agno/models/groq/groq.py +48 -18
  151. agno/models/huggingface/huggingface.py +17 -6
  152. agno/models/ibm/watsonx.py +16 -6
  153. agno/models/internlm/internlm.py +18 -1
  154. agno/models/langdb/langdb.py +13 -1
  155. agno/models/litellm/chat.py +88 -9
  156. agno/models/litellm/litellm_openai.py +18 -1
  157. agno/models/message.py +24 -5
  158. agno/models/meta/llama.py +40 -13
  159. agno/models/meta/llama_openai.py +22 -21
  160. agno/models/metrics.py +12 -0
  161. agno/models/mistral/mistral.py +8 -4
  162. agno/models/n1n/__init__.py +3 -0
  163. agno/models/n1n/n1n.py +57 -0
  164. agno/models/nebius/nebius.py +6 -7
  165. agno/models/nvidia/nvidia.py +20 -3
  166. agno/models/ollama/__init__.py +2 -0
  167. agno/models/ollama/chat.py +17 -6
  168. agno/models/ollama/responses.py +100 -0
  169. agno/models/openai/__init__.py +2 -0
  170. agno/models/openai/chat.py +117 -26
  171. agno/models/openai/open_responses.py +46 -0
  172. agno/models/openai/responses.py +110 -32
  173. agno/models/openrouter/__init__.py +2 -0
  174. agno/models/openrouter/openrouter.py +67 -2
  175. agno/models/openrouter/responses.py +146 -0
  176. agno/models/perplexity/perplexity.py +19 -1
  177. agno/models/portkey/portkey.py +7 -6
  178. agno/models/requesty/requesty.py +19 -2
  179. agno/models/response.py +20 -2
  180. agno/models/sambanova/sambanova.py +20 -3
  181. agno/models/siliconflow/siliconflow.py +19 -2
  182. agno/models/together/together.py +20 -3
  183. agno/models/vercel/v0.py +20 -3
  184. agno/models/vertexai/claude.py +124 -4
  185. agno/models/vllm/vllm.py +19 -14
  186. agno/models/xai/xai.py +19 -2
  187. agno/os/app.py +467 -137
  188. agno/os/auth.py +253 -5
  189. agno/os/config.py +22 -0
  190. agno/os/interfaces/a2a/a2a.py +7 -6
  191. agno/os/interfaces/a2a/router.py +635 -26
  192. agno/os/interfaces/a2a/utils.py +32 -33
  193. agno/os/interfaces/agui/agui.py +5 -3
  194. agno/os/interfaces/agui/router.py +26 -16
  195. agno/os/interfaces/agui/utils.py +97 -57
  196. agno/os/interfaces/base.py +7 -7
  197. agno/os/interfaces/slack/router.py +16 -7
  198. agno/os/interfaces/slack/slack.py +7 -7
  199. agno/os/interfaces/whatsapp/router.py +35 -7
  200. agno/os/interfaces/whatsapp/security.py +3 -1
  201. agno/os/interfaces/whatsapp/whatsapp.py +11 -8
  202. agno/os/managers.py +326 -0
  203. agno/os/mcp.py +652 -79
  204. agno/os/middleware/__init__.py +4 -0
  205. agno/os/middleware/jwt.py +718 -115
  206. agno/os/middleware/trailing_slash.py +27 -0
  207. agno/os/router.py +105 -1558
  208. agno/os/routers/agents/__init__.py +3 -0
  209. agno/os/routers/agents/router.py +655 -0
  210. agno/os/routers/agents/schema.py +288 -0
  211. agno/os/routers/components/__init__.py +3 -0
  212. agno/os/routers/components/components.py +475 -0
  213. agno/os/routers/database.py +155 -0
  214. agno/os/routers/evals/evals.py +111 -18
  215. agno/os/routers/evals/schemas.py +38 -5
  216. agno/os/routers/evals/utils.py +80 -11
  217. agno/os/routers/health.py +3 -3
  218. agno/os/routers/knowledge/knowledge.py +284 -35
  219. agno/os/routers/knowledge/schemas.py +14 -2
  220. agno/os/routers/memory/memory.py +274 -11
  221. agno/os/routers/memory/schemas.py +44 -3
  222. agno/os/routers/metrics/metrics.py +30 -15
  223. agno/os/routers/metrics/schemas.py +10 -6
  224. agno/os/routers/registry/__init__.py +3 -0
  225. agno/os/routers/registry/registry.py +337 -0
  226. agno/os/routers/session/session.py +143 -14
  227. agno/os/routers/teams/__init__.py +3 -0
  228. agno/os/routers/teams/router.py +550 -0
  229. agno/os/routers/teams/schema.py +280 -0
  230. agno/os/routers/traces/__init__.py +3 -0
  231. agno/os/routers/traces/schemas.py +414 -0
  232. agno/os/routers/traces/traces.py +549 -0
  233. agno/os/routers/workflows/__init__.py +3 -0
  234. agno/os/routers/workflows/router.py +757 -0
  235. agno/os/routers/workflows/schema.py +139 -0
  236. agno/os/schema.py +157 -584
  237. agno/os/scopes.py +469 -0
  238. agno/os/settings.py +3 -0
  239. agno/os/utils.py +574 -185
  240. agno/reasoning/anthropic.py +85 -1
  241. agno/reasoning/azure_ai_foundry.py +93 -1
  242. agno/reasoning/deepseek.py +102 -2
  243. agno/reasoning/default.py +6 -7
  244. agno/reasoning/gemini.py +87 -3
  245. agno/reasoning/groq.py +109 -2
  246. agno/reasoning/helpers.py +6 -7
  247. agno/reasoning/manager.py +1238 -0
  248. agno/reasoning/ollama.py +93 -1
  249. agno/reasoning/openai.py +115 -1
  250. agno/reasoning/vertexai.py +85 -1
  251. agno/registry/__init__.py +3 -0
  252. agno/registry/registry.py +68 -0
  253. agno/remote/__init__.py +3 -0
  254. agno/remote/base.py +581 -0
  255. agno/run/__init__.py +2 -4
  256. agno/run/agent.py +134 -19
  257. agno/run/base.py +49 -1
  258. agno/run/cancel.py +65 -52
  259. agno/run/cancellation_management/__init__.py +9 -0
  260. agno/run/cancellation_management/base.py +78 -0
  261. agno/run/cancellation_management/in_memory_cancellation_manager.py +100 -0
  262. agno/run/cancellation_management/redis_cancellation_manager.py +236 -0
  263. agno/run/requirement.py +181 -0
  264. agno/run/team.py +111 -19
  265. agno/run/workflow.py +2 -1
  266. agno/session/agent.py +57 -92
  267. agno/session/summary.py +1 -1
  268. agno/session/team.py +62 -115
  269. agno/session/workflow.py +353 -57
  270. agno/skills/__init__.py +17 -0
  271. agno/skills/agent_skills.py +377 -0
  272. agno/skills/errors.py +32 -0
  273. agno/skills/loaders/__init__.py +4 -0
  274. agno/skills/loaders/base.py +27 -0
  275. agno/skills/loaders/local.py +216 -0
  276. agno/skills/skill.py +65 -0
  277. agno/skills/utils.py +107 -0
  278. agno/skills/validator.py +277 -0
  279. agno/table.py +10 -0
  280. agno/team/__init__.py +5 -1
  281. agno/team/remote.py +447 -0
  282. agno/team/team.py +3769 -2202
  283. agno/tools/brandfetch.py +27 -18
  284. agno/tools/browserbase.py +225 -16
  285. agno/tools/crawl4ai.py +3 -0
  286. agno/tools/duckduckgo.py +25 -71
  287. agno/tools/exa.py +0 -21
  288. agno/tools/file.py +14 -13
  289. agno/tools/file_generation.py +12 -6
  290. agno/tools/firecrawl.py +15 -7
  291. agno/tools/function.py +94 -113
  292. agno/tools/google_bigquery.py +11 -2
  293. agno/tools/google_drive.py +4 -3
  294. agno/tools/knowledge.py +9 -4
  295. agno/tools/mcp/mcp.py +301 -18
  296. agno/tools/mcp/multi_mcp.py +269 -14
  297. agno/tools/mem0.py +11 -10
  298. agno/tools/memory.py +47 -46
  299. agno/tools/mlx_transcribe.py +10 -7
  300. agno/tools/models/nebius.py +5 -5
  301. agno/tools/models_labs.py +20 -10
  302. agno/tools/nano_banana.py +151 -0
  303. agno/tools/parallel.py +0 -7
  304. agno/tools/postgres.py +76 -36
  305. agno/tools/python.py +14 -6
  306. agno/tools/reasoning.py +30 -23
  307. agno/tools/redshift.py +406 -0
  308. agno/tools/shopify.py +1519 -0
  309. agno/tools/spotify.py +919 -0
  310. agno/tools/tavily.py +4 -1
  311. agno/tools/toolkit.py +253 -18
  312. agno/tools/websearch.py +93 -0
  313. agno/tools/website.py +1 -1
  314. agno/tools/wikipedia.py +1 -1
  315. agno/tools/workflow.py +56 -48
  316. agno/tools/yfinance.py +12 -11
  317. agno/tracing/__init__.py +12 -0
  318. agno/tracing/exporter.py +161 -0
  319. agno/tracing/schemas.py +276 -0
  320. agno/tracing/setup.py +112 -0
  321. agno/utils/agent.py +251 -10
  322. agno/utils/cryptography.py +22 -0
  323. agno/utils/dttm.py +33 -0
  324. agno/utils/events.py +264 -7
  325. agno/utils/hooks.py +111 -3
  326. agno/utils/http.py +161 -2
  327. agno/utils/mcp.py +49 -8
  328. agno/utils/media.py +22 -1
  329. agno/utils/models/ai_foundry.py +9 -2
  330. agno/utils/models/claude.py +20 -5
  331. agno/utils/models/cohere.py +9 -2
  332. agno/utils/models/llama.py +9 -2
  333. agno/utils/models/mistral.py +4 -2
  334. agno/utils/os.py +0 -0
  335. agno/utils/print_response/agent.py +99 -16
  336. agno/utils/print_response/team.py +223 -24
  337. agno/utils/print_response/workflow.py +0 -2
  338. agno/utils/prompts.py +8 -6
  339. agno/utils/remote.py +23 -0
  340. agno/utils/response.py +1 -13
  341. agno/utils/string.py +91 -2
  342. agno/utils/team.py +62 -12
  343. agno/utils/tokens.py +657 -0
  344. agno/vectordb/base.py +15 -2
  345. agno/vectordb/cassandra/cassandra.py +1 -1
  346. agno/vectordb/chroma/__init__.py +2 -1
  347. agno/vectordb/chroma/chromadb.py +468 -23
  348. agno/vectordb/clickhouse/clickhousedb.py +1 -1
  349. agno/vectordb/couchbase/couchbase.py +6 -2
  350. agno/vectordb/lancedb/lance_db.py +7 -38
  351. agno/vectordb/lightrag/lightrag.py +7 -6
  352. agno/vectordb/milvus/milvus.py +118 -84
  353. agno/vectordb/mongodb/__init__.py +2 -1
  354. agno/vectordb/mongodb/mongodb.py +14 -31
  355. agno/vectordb/pgvector/pgvector.py +120 -66
  356. agno/vectordb/pineconedb/pineconedb.py +2 -19
  357. agno/vectordb/qdrant/__init__.py +2 -1
  358. agno/vectordb/qdrant/qdrant.py +33 -56
  359. agno/vectordb/redis/__init__.py +2 -1
  360. agno/vectordb/redis/redisdb.py +19 -31
  361. agno/vectordb/singlestore/singlestore.py +17 -9
  362. agno/vectordb/surrealdb/surrealdb.py +2 -38
  363. agno/vectordb/weaviate/__init__.py +2 -1
  364. agno/vectordb/weaviate/weaviate.py +7 -3
  365. agno/workflow/__init__.py +5 -1
  366. agno/workflow/agent.py +2 -2
  367. agno/workflow/condition.py +12 -10
  368. agno/workflow/loop.py +28 -9
  369. agno/workflow/parallel.py +21 -13
  370. agno/workflow/remote.py +362 -0
  371. agno/workflow/router.py +12 -9
  372. agno/workflow/step.py +261 -36
  373. agno/workflow/steps.py +12 -8
  374. agno/workflow/types.py +40 -77
  375. agno/workflow/workflow.py +939 -213
  376. {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/METADATA +134 -181
  377. agno-2.4.3.dist-info/RECORD +677 -0
  378. {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/WHEEL +1 -1
  379. agno/tools/googlesearch.py +0 -98
  380. agno/tools/memori.py +0 -339
  381. agno-2.2.13.dist-info/RECORD +0 -575
  382. {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/licenses/LICENSE +0 -0
  383. {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/top_level.txt +0 -0
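The largest single file diff reproduced below is agno/db/sqlite/async_sqlite.py, which adds trace, span, schema-version, and learnings tables plus an explicit close() method. A minimal usage sketch follows; only the new table-name keywords and close() are confirmed by the diff below, while db_file is an assumption carried over from the existing 2.x SQLite adapters:

import asyncio

from agno.db.sqlite.async_sqlite import AsyncSqliteDb


async def main() -> None:
    # Table-name keywords are new in 2.4.x (see the async_sqlite.py hunks below).
    # db_file is assumed from the sync SqliteDb constructor, not shown in this diff.
    db = AsyncSqliteDb(
        db_file="agno.db",
        traces_table="agno_traces",
        spans_table="agno_spans",
        versions_table="agno_schema_versions",
        learnings_table="agno_learnings",
    )
    try:
        ...  # hand the db to an Agent / AgentOS as usual
    finally:
        # New in 2.4.x: dispose of the connection pool on shutdown.
        await db.close()


asyncio.run(main())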
@@ -1,10 +1,14 @@
  import time
  from datetime import date, datetime, timedelta, timezone
  from pathlib import Path
- from typing import Any, Dict, List, Optional, Sequence, Tuple, Union, cast
+ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Tuple, Union, cast
  from uuid import uuid4

- from agno.db.base import AsyncBaseDb, SessionType
+ if TYPE_CHECKING:
+ from agno.tracing.schemas import Span, Trace
+
+ from agno.db.base import AsyncBaseDb, ComponentType, SessionType
+ from agno.db.migrations.manager import MigrationManager
  from agno.db.schemas.culture import CulturalKnowledge
  from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
  from agno.db.schemas.knowledge import KnowledgeRow
@@ -27,7 +31,7 @@ from agno.utils.log import log_debug, log_error, log_info, log_warning
  from agno.utils.string import generate_id

  try:
- from sqlalchemy import Column, MetaData, Table, and_, func, select, text
+ from sqlalchemy import Column, ForeignKey, MetaData, String, Table, func, select, text
  from sqlalchemy.dialects import sqlite
  from sqlalchemy.ext.asyncio import AsyncEngine, async_sessionmaker, create_async_engine
  from sqlalchemy.schema import Index, UniqueConstraint
@@ -47,6 +51,10 @@ class AsyncSqliteDb(AsyncBaseDb):
  metrics_table: Optional[str] = None,
  eval_table: Optional[str] = None,
  knowledge_table: Optional[str] = None,
+ traces_table: Optional[str] = None,
+ spans_table: Optional[str] = None,
+ versions_table: Optional[str] = None,
+ learnings_table: Optional[str] = None,
  id: Optional[str] = None,
  ):
  """
@@ -68,6 +76,10 @@ class AsyncSqliteDb(AsyncBaseDb):
  metrics_table (Optional[str]): Name of the table to store metrics.
  eval_table (Optional[str]): Name of the table to store evaluation runs data.
  knowledge_table (Optional[str]): Name of the table to store knowledge documents data.
+ traces_table (Optional[str]): Name of the table to store run traces.
+ spans_table (Optional[str]): Name of the table to store span events.
+ versions_table (Optional[str]): Name of the table to store schema versions.
+ learnings_table (Optional[str]): Name of the table to store learning records.
  id (Optional[str]): ID of the database.

  Raises:
@@ -85,6 +97,10 @@ class AsyncSqliteDb(AsyncBaseDb):
  metrics_table=metrics_table,
  eval_table=eval_table,
  knowledge_table=knowledge_table,
+ traces_table=traces_table,
+ spans_table=spans_table,
+ versions_table=versions_table,
+ learnings_table=learnings_table,
  )

  _engine: Optional[AsyncEngine] = db_engine
@@ -111,6 +127,15 @@ class AsyncSqliteDb(AsyncBaseDb):
  # Initialize database session factory
  self.async_session_factory = async_sessionmaker(bind=self.db_engine, expire_on_commit=False)

+ async def close(self) -> None:
+ """Close database connections and dispose of the connection pool.
+
+ Should be called during application shutdown to properly release
+ all database connections.
+ """
+ if self.db_engine is not None:
+ await self.db_engine.dispose()
+
  # -- DB methods --
  async def table_exists(self, table_name: str) -> bool:
  """Check if a table with the given name exists in the SQLite database.
@@ -132,10 +157,14 @@ class AsyncSqliteDb(AsyncBaseDb):
  (self.metrics_table_name, "metrics"),
  (self.eval_table_name, "evals"),
  (self.knowledge_table_name, "knowledge"),
+ (self.versions_table_name, "versions"),
+ (self.learnings_table_name, "learnings"),
  ]

  for table_name, table_type in tables_to_create:
- await self._create_table(table_name=table_name, table_type=table_type)
+ await self._get_or_create_table(
+ table_name=table_name, table_type=table_type, create_table_if_not_found=True
+ )

  async def _create_table(self, table_name: str, table_type: str) -> Table:
  """
@@ -149,8 +178,8 @@ class AsyncSqliteDb(AsyncBaseDb):
  Table: SQLAlchemy Table object
  """
  try:
- table_schema = get_table_schema_definition(table_type)
- log_debug(f"Creating table {table_name}")
+ # Pass traces_table_name for spans table foreign key resolution
+ table_schema = get_table_schema_definition(table_type, traces_table_name=self.trace_table_name).copy()

  columns: List[Column] = []
  indexes: List[str] = []
@@ -172,11 +201,14 @@ class AsyncSqliteDb(AsyncBaseDb):
  column_kwargs["unique"] = True
  unique_constraints.append(col_name)

+ # Handle foreign key constraint
+ if "foreign_key" in col_config:
+ column_args.append(ForeignKey(col_config["foreign_key"]))
+
  columns.append(Column(*column_args, **column_kwargs)) # type: ignore

  # Create the table object
- table_metadata = MetaData()
- table = Table(table_name, table_metadata, *columns)
+ table = Table(table_name, self.metadata, *columns)

  # Add multi-column unique constraints with table-specific names
  for constraint in schema_unique_constraints:
@@ -190,13 +222,18 @@ class AsyncSqliteDb(AsyncBaseDb):
  table.append_constraint(Index(idx_name, idx_col))

  # Create table
- async with self.db_engine.begin() as conn:
- await conn.run_sync(table.create, checkfirst=True)
+ table_created = False
+ if not await self.table_exists(table_name):
+ async with self.db_engine.begin() as conn:
+ await conn.run_sync(table.create, checkfirst=True)
+ log_debug(f"Successfully created table '{table_name}'")
+ table_created = True
+ else:
+ log_debug(f"Table {table_name} already exists, skipping creation")

  # Create indexes
  for idx in table.indexes:
  try:
- log_debug(f"Creating index: {idx.name}")
  # Check if index already exists
  async with self.async_session_factory() as sess:
  exists_query = text("SELECT 1 FROM sqlite_master WHERE type = 'index' AND name = :index_name")
@@ -208,66 +245,106 @@ class AsyncSqliteDb(AsyncBaseDb):

  async with self.db_engine.begin() as conn:
  await conn.run_sync(idx.create)
+ log_debug(f"Created index: {idx.name} for table {table_name}")

  except Exception as e:
  log_warning(f"Error creating index {idx.name}: {e}")

- log_debug(f"Successfully created table '{table_name}'")
+ # Store the schema version for the created table
+ if table_name != self.versions_table_name and table_created:
+ latest_schema_version = MigrationManager(self).latest_schema_version
+ await self.upsert_schema_version(table_name=table_name, version=latest_schema_version.public)
+
  return table

  except Exception as e:
  log_error(f"Could not create table '{table_name}': {e}")
  raise e

- async def _get_table(self, table_type: str) -> Optional[Table]:
+ async def _get_table(self, table_type: str, create_table_if_not_found: Optional[bool] = False) -> Optional[Table]:
  if table_type == "sessions":
- if not hasattr(self, "session_table"):
- self.session_table = await self._get_or_create_table(
- table_name=self.session_table_name,
- table_type=table_type,
- )
+ self.session_table = await self._get_or_create_table(
+ table_name=self.session_table_name,
+ table_type=table_type,
+ create_table_if_not_found=create_table_if_not_found,
+ )
  return self.session_table

  elif table_type == "memories":
- if not hasattr(self, "memory_table"):
- self.memory_table = await self._get_or_create_table(
- table_name=self.memory_table_name,
- table_type="memories",
- )
+ self.memory_table = await self._get_or_create_table(
+ table_name=self.memory_table_name,
+ table_type="memories",
+ create_table_if_not_found=create_table_if_not_found,
+ )
  return self.memory_table

  elif table_type == "metrics":
- if not hasattr(self, "metrics_table"):
- self.metrics_table = await self._get_or_create_table(
- table_name=self.metrics_table_name,
- table_type="metrics",
- )
+ self.metrics_table = await self._get_or_create_table(
+ table_name=self.metrics_table_name,
+ table_type="metrics",
+ create_table_if_not_found=create_table_if_not_found,
+ )
  return self.metrics_table

  elif table_type == "evals":
- if not hasattr(self, "eval_table"):
- self.eval_table = await self._get_or_create_table(
- table_name=self.eval_table_name,
- table_type="evals",
- )
+ self.eval_table = await self._get_or_create_table(
+ table_name=self.eval_table_name,
+ table_type="evals",
+ create_table_if_not_found=create_table_if_not_found,
+ )
  return self.eval_table

  elif table_type == "knowledge":
- if not hasattr(self, "knowledge_table"):
- self.knowledge_table = await self._get_or_create_table(
- table_name=self.knowledge_table_name,
- table_type="knowledge",
- )
+ self.knowledge_table = await self._get_or_create_table(
+ table_name=self.knowledge_table_name,
+ table_type="knowledge",
+ create_table_if_not_found=create_table_if_not_found,
+ )
  return self.knowledge_table

  elif table_type == "culture":
- if not hasattr(self, "culture_table"):
- self.culture_table = await self._get_or_create_table(
- table_name=self.culture_table_name,
- table_type="culture",
- )
+ self.culture_table = await self._get_or_create_table(
+ table_name=self.culture_table_name,
+ table_type="culture",
+ create_table_if_not_found=create_table_if_not_found,
+ )
  return self.culture_table

+ elif table_type == "versions":
+ self.versions_table = await self._get_or_create_table(
+ table_name=self.versions_table_name,
+ table_type="versions",
+ create_table_if_not_found=create_table_if_not_found,
+ )
+ return self.versions_table
+
+ elif table_type == "traces":
+ self.traces_table = await self._get_or_create_table(
+ table_name=self.trace_table_name,
+ table_type="traces",
+ create_table_if_not_found=create_table_if_not_found,
+ )
+ return self.traces_table
+
+ elif table_type == "spans":
+ # Ensure traces table exists first (spans has FK to traces)
+ if create_table_if_not_found:
+ await self._get_table(table_type="traces", create_table_if_not_found=True)
+ self.spans_table = await self._get_or_create_table(
+ table_name=self.span_table_name,
+ table_type="spans",
+ create_table_if_not_found=create_table_if_not_found,
+ )
+ return self.spans_table
+
+ elif table_type == "learnings":
+ self.learnings_table = await self._get_or_create_table(
+ table_name=self.learnings_table_name,
+ table_type="learnings",
+ create_table_if_not_found=create_table_if_not_found,
+ )
+ return self.learnings_table
+
  else:
  raise ValueError(f"Unknown table type: '{table_type}'")

@@ -275,7 +352,8 @@ class AsyncSqliteDb(AsyncBaseDb):
  self,
  table_name: str,
  table_type: str,
- ) -> Table:
+ create_table_if_not_found: Optional[bool] = False,
+ ) -> Optional[Table]:
  """
  Check if the table exists and is valid, else create it.

@@ -290,6 +368,8 @@ class AsyncSqliteDb(AsyncBaseDb):
  table_is_available = await ais_table_available(session=sess, table_name=table_name)

  if not table_is_available:
+ if not create_table_if_not_found:
+ return None
  return await self._create_table(table_name=table_name, table_type=table_type)

  # SQLite version of table validation (no schema)
@@ -303,13 +383,49 @@ class AsyncSqliteDb(AsyncBaseDb):
  return Table(table_name, self.metadata, autoload_with=connection)

  table = await conn.run_sync(load_table)
- log_debug(f"Loaded existing table {table_name}")
  return table

  except Exception as e:
  log_error(f"Error loading existing table {table_name}: {e}")
  raise e

+ async def get_latest_schema_version(self, table_name: str) -> str:
+ """Get the latest version of the database schema."""
+ table = await self._get_table(table_type="versions", create_table_if_not_found=True)
+ if table is None:
+ return "2.0.0"
+ async with self.async_session_factory() as sess:
+ stmt = select(table)
+ # Latest version for the given table
+ stmt = stmt.where(table.c.table_name == table_name)
+ stmt = stmt.order_by(table.c.version.desc()).limit(1)
+ result = await sess.execute(stmt)
+ row = result.fetchone()
+ if row is None:
+ return "2.0.0"
+ version_dict = dict(row._mapping)
+ return version_dict.get("version") or "2.0.0"
+
+ async def upsert_schema_version(self, table_name: str, version: str) -> None:
+ """Upsert the schema version into the database."""
+ table = await self._get_table(table_type="versions", create_table_if_not_found=True)
+ if table is None:
+ return
+ current_datetime = datetime.now().isoformat()
+ async with self.async_session_factory() as sess, sess.begin():
+ stmt = sqlite.insert(table).values(
+ table_name=table_name,
+ version=version,
+ created_at=current_datetime, # Store as ISO format string
+ updated_at=current_datetime,
+ )
+ # Update version if table_name already exists
+ stmt = stmt.on_conflict_do_update(
+ index_elements=["table_name"],
+ set_=dict(version=version, updated_at=current_datetime),
+ )
+ await sess.execute(stmt)
+
  # -- Session methods --

  async def delete_session(self, session_id: str) -> bool:
@@ -590,7 +706,7 @@ class AsyncSqliteDb(AsyncBaseDb):
  Exception: If an error occurs during upserting.
  """
  try:
- table = await self._get_table(table_type="sessions")
+ table = await self._get_table(table_type="sessions", create_table_if_not_found=True)
  if table is None:
  return None

@@ -736,7 +852,7 @@ class AsyncSqliteDb(AsyncBaseDb):
  return []

  try:
- table = await self._get_table(table_type="sessions")
+ table = await self._get_table(table_type="sessions", create_table_if_not_found=True)
  if table is None:
  log_info("Sessions table not available, falling back to individual upserts")
  return [
@@ -1017,7 +1133,7 @@ class AsyncSqliteDb(AsyncBaseDb):

  async with self.async_session_factory() as sess, sess.begin():
  # Select topics from all results
- stmt = select(func.json_array_elements_text(table.c.topics)).select_from(table)
+ stmt = select(table.c.topics)
  result = (await sess.execute(stmt)).fetchall()

  return list(set([record[0] for record in result]))
@@ -1122,8 +1238,8 @@ class AsyncSqliteDb(AsyncBaseDb):
  if team_id is not None:
  stmt = stmt.where(table.c.team_id == team_id)
  if topics is not None:
- topic_conditions = [text(f"topics::text LIKE '%\"{topic}\"%'") for topic in topics]
- stmt = stmt.where(and_(*topic_conditions))
+ for topic in topics:
+ stmt = stmt.where(func.cast(table.c.topics, String).like(f'%"{topic}"%'))
  if search_content is not None:
  stmt = stmt.where(table.c.memory.ilike(f"%{search_content}%"))

@@ -1158,12 +1274,14 @@ class AsyncSqliteDb(AsyncBaseDb):
  self,
  limit: Optional[int] = None,
  page: Optional[int] = None,
+ user_id: Optional[str] = None,
  ) -> Tuple[List[Dict[str, Any]], int]:
  """Get user memories stats.

  Args:
  limit (Optional[int]): The maximum number of user stats to return.
  page (Optional[int]): The page number.
+ user_id (Optional[str]): User ID for filtering.

  Returns:
  Tuple[List[Dict[str, Any]], int]: A list of dictionaries containing user stats and total count.
@@ -1186,17 +1304,19 @@ class AsyncSqliteDb(AsyncBaseDb):
  return [], 0

  async with self.async_session_factory() as sess, sess.begin():
- stmt = (
- select(
- table.c.user_id,
- func.count(table.c.memory_id).label("total_memories"),
- func.max(table.c.updated_at).label("last_memory_updated_at"),
- )
- .where(table.c.user_id.is_not(None))
- .group_by(table.c.user_id)
- .order_by(func.max(table.c.updated_at).desc())
+ stmt = select(
+ table.c.user_id,
+ func.count(table.c.memory_id).label("total_memories"),
+ func.max(table.c.updated_at).label("last_memory_updated_at"),
  )

+ if user_id is not None:
+ stmt = stmt.where(table.c.user_id == user_id)
+ else:
+ stmt = stmt.where(table.c.user_id.is_not(None))
+ stmt = stmt.group_by(table.c.user_id)
+ stmt = stmt.order_by(func.max(table.c.updated_at).desc())
+
  count_stmt = select(func.count()).select_from(stmt.alias())
  total_count = (await sess.execute(count_stmt)).scalar() or 0

@@ -1248,29 +1368,39 @@ class AsyncSqliteDb(AsyncBaseDb):
  if memory.memory_id is None:
  memory.memory_id = str(uuid4())

- async with self.async_session_factory() as sess, sess.begin():
- stmt = sqlite.insert(table).values(
- user_id=memory.user_id,
- agent_id=memory.agent_id,
- team_id=memory.team_id,
- memory_id=memory.memory_id,
- memory=memory.memory,
- topics=memory.topics,
- input=memory.input,
- updated_at=int(time.time()),
- )
- stmt = stmt.on_conflict_do_update( # type: ignore
- index_elements=["memory_id"],
- set_=dict(
+ current_time = int(time.time())
+
+ async with self.async_session_factory() as sess:
+ async with sess.begin():
+ stmt = sqlite.insert(table).values(
+ user_id=memory.user_id,
+ agent_id=memory.agent_id,
+ team_id=memory.team_id,
+ memory_id=memory.memory_id,
  memory=memory.memory,
  topics=memory.topics,
  input=memory.input,
- updated_at=int(time.time()),
- ),
- ).returning(table)
+ feedback=memory.feedback,
+ created_at=memory.created_at,
+ updated_at=memory.created_at,
+ )
+ stmt = stmt.on_conflict_do_update( # type: ignore
+ index_elements=["memory_id"],
+ set_=dict(
+ memory=memory.memory,
+ topics=memory.topics,
+ input=memory.input,
+ agent_id=memory.agent_id,
+ team_id=memory.team_id,
+ feedback=memory.feedback,
+ updated_at=current_time,
+ # Preserve created_at on update - don't overwrite existing value
+ created_at=table.c.created_at,
+ ),
+ ).returning(table)

- result = await sess.execute(stmt)
- row = result.fetchone()
+ result = await sess.execute(stmt)
+ row = result.fetchone()

  if row is None:
  return None
@@ -1321,12 +1451,14 @@ class AsyncSqliteDb(AsyncBaseDb):
  # Prepare bulk data
  bulk_data = []
  current_time = int(time.time())
+
  for memory in memories:
  if memory.memory_id is None:
  memory.memory_id = str(uuid4())

  # Use preserved updated_at if flag is set and value exists, otherwise use current time
  updated_at = memory.updated_at if preserve_updated_at else current_time
+
  bulk_data.append(
  {
  "user_id": memory.user_id,
@@ -1335,6 +1467,9 @@ class AsyncSqliteDb(AsyncBaseDb):
  "memory_id": memory.memory_id,
  "memory": memory.memory,
  "topics": memory.topics,
+ "input": memory.input,
+ "feedback": memory.feedback,
+ "created_at": memory.created_at,
  "updated_at": updated_at,
  }
  )
@@ -1352,7 +1487,10 @@ class AsyncSqliteDb(AsyncBaseDb):
  input=stmt.excluded.input,
  agent_id=stmt.excluded.agent_id,
  team_id=stmt.excluded.team_id,
+ feedback=stmt.excluded.feedback,
  updated_at=stmt.excluded.updated_at,
+ # Preserve created_at on update
+ created_at=table.c.created_at,
  ),
  )
  await sess.execute(stmt, bulk_data)
@@ -1422,7 +1560,7 @@ class AsyncSqliteDb(AsyncBaseDb):
  Exception: If an error occurs during retrieval.
  """
  try:
- table = await self._get_table(table_type="sessions")
+ table = await self._get_table(table_type="sessions", create_table_if_not_found=True)
  if table is None:
  return []

@@ -1707,7 +1845,7 @@ class AsyncSqliteDb(AsyncBaseDb):
  Optional[KnowledgeRow]: The upserted knowledge row, or None if the operation fails.
  """
  try:
- table = await self._get_table(table_type="knowledge")
+ table = await self._get_table(table_type="knowledge", create_table_if_not_found=True)
  if table is None:
  return None

@@ -1760,7 +1898,7 @@ class AsyncSqliteDb(AsyncBaseDb):
  Exception: If an error occurs during creation.
  """
  try:
- table = await self._get_table(table_type="evals")
+ table = await self._get_table(table_type="evals", create_table_if_not_found=True)
  if table is None:
  return None

@@ -2078,10 +2216,7 @@ class AsyncSqliteDb(AsyncBaseDb):
  await sess.execute(table.delete())

  except Exception as e:
- from agno.utils.log import log_warning
-
- log_warning(f"Exception deleting all cultural artifacts: {e}")
- raise e
+ log_error(f"Exception deleting all cultural artifacts: {e}")

  async def delete_cultural_knowledge(self, id: str) -> None:
  """Delete a cultural artifact from the database.
@@ -2109,7 +2244,6 @@ class AsyncSqliteDb(AsyncBaseDb):

  except Exception as e:
  log_error(f"Error deleting cultural artifact: {e}")
- raise e

  async def get_cultural_knowledge(
  self, id: str, deserialize: Optional[bool] = True
@@ -2145,7 +2279,7 @@ class AsyncSqliteDb(AsyncBaseDb):

  except Exception as e:
  log_error(f"Exception reading from cultural artifacts table: {e}")
- raise e
+ return None

  async def get_all_cultural_knowledge(
  self,
@@ -2219,7 +2353,7 @@ class AsyncSqliteDb(AsyncBaseDb):

  except Exception as e:
  log_error(f"Error reading from cultural artifacts table: {e}")
- raise e
+ return [] if deserialize else ([], 0)

  async def upsert_cultural_knowledge(
  self, cultural_knowledge: CulturalKnowledge, deserialize: Optional[bool] = True
@@ -2239,7 +2373,7 @@ class AsyncSqliteDb(AsyncBaseDb):
  Exception: If an error occurs during upsert.
  """
  try:
- table = await self._get_table(table_type="culture")
+ table = await self._get_table(table_type="culture", create_table_if_not_found=True)
  if table is None:
  return None

@@ -2291,3 +2425,849 @@ class AsyncSqliteDb(AsyncBaseDb):
2291
2425
  except Exception as e:
2292
2426
  log_error(f"Error upserting cultural knowledge: {e}")
2293
2427
  raise e
2428
+
2429
+ # --- Traces ---
2430
+ def _get_traces_base_query(self, table: Table, spans_table: Optional[Table] = None):
2431
+ """Build base query for traces with aggregated span counts.
2432
+
2433
+ Args:
2434
+ table: The traces table.
2435
+ spans_table: The spans table (optional).
2436
+
2437
+ Returns:
2438
+ SQLAlchemy select statement with total_spans and error_count calculated dynamically.
2439
+ """
2440
+ from sqlalchemy import case, literal
2441
+
2442
+ if spans_table is not None:
2443
+ # JOIN with spans table to calculate total_spans and error_count
2444
+ return (
2445
+ select(
2446
+ table,
2447
+ func.coalesce(func.count(spans_table.c.span_id), 0).label("total_spans"),
2448
+ func.coalesce(func.sum(case((spans_table.c.status_code == "ERROR", 1), else_=0)), 0).label(
2449
+ "error_count"
2450
+ ),
2451
+ )
2452
+ .select_from(table.outerjoin(spans_table, table.c.trace_id == spans_table.c.trace_id))
2453
+ .group_by(table.c.trace_id)
2454
+ )
2455
+ else:
2456
+ # Fallback if spans table doesn't exist
2457
+ return select(table, literal(0).label("total_spans"), literal(0).label("error_count"))
2458
+
2459
+ def _get_trace_component_level_expr(self, workflow_id_col, team_id_col, agent_id_col, name_col):
2460
+ """Build a SQL CASE expression that returns the component level for a trace.
2461
+
2462
+ Component levels (higher = more important):
2463
+ - 3: Workflow root (.run or .arun with workflow_id)
2464
+ - 2: Team root (.run or .arun with team_id)
2465
+ - 1: Agent root (.run or .arun with agent_id)
2466
+ - 0: Child span (not a root)
2467
+
2468
+ Args:
2469
+ workflow_id_col: SQL column/expression for workflow_id
2470
+ team_id_col: SQL column/expression for team_id
2471
+ agent_id_col: SQL column/expression for agent_id
2472
+ name_col: SQL column/expression for name
2473
+
2474
+ Returns:
2475
+ SQLAlchemy CASE expression returning the component level as an integer.
2476
+ """
2477
+ from sqlalchemy import and_, case, or_
2478
+
2479
+ is_root_name = or_(name_col.contains(".run"), name_col.contains(".arun"))
2480
+
2481
+ return case(
2482
+ # Workflow root (level 3)
2483
+ (and_(workflow_id_col.isnot(None), is_root_name), 3),
2484
+ # Team root (level 2)
2485
+ (and_(team_id_col.isnot(None), is_root_name), 2),
2486
+ # Agent root (level 1)
2487
+ (and_(agent_id_col.isnot(None), is_root_name), 1),
2488
+ # Child span or unknown (level 0)
2489
+ else_=0,
2490
+ )
2491
+
2492
+ async def upsert_trace(self, trace: "Trace") -> None:
2493
+ """Create or update a single trace record in the database.
2494
+
2495
+ Uses INSERT ... ON CONFLICT DO UPDATE (upsert) to handle concurrent inserts
2496
+ atomically and avoid race conditions.
2497
+
2498
+ Args:
2499
+ trace: The Trace object to store (one per trace_id).
2500
+ """
2501
+ from sqlalchemy import case
2502
+
2503
+ try:
2504
+ table = await self._get_table(table_type="traces", create_table_if_not_found=True)
2505
+ if table is None:
2506
+ return
2507
+
2508
+ trace_dict = trace.to_dict()
2509
+ trace_dict.pop("total_spans", None)
2510
+ trace_dict.pop("error_count", None)
2511
+
2512
+ async with self.async_session_factory() as sess, sess.begin():
2513
+ # Use upsert to handle concurrent inserts atomically
2514
+ # On conflict, update fields while preserving existing non-null context values
2515
+ # and keeping the earliest start_time
2516
+ insert_stmt = sqlite.insert(table).values(trace_dict)
2517
+
2518
+ # Build component level expressions for comparing trace priority
2519
+ new_level = self._get_trace_component_level_expr(
2520
+ insert_stmt.excluded.workflow_id,
2521
+ insert_stmt.excluded.team_id,
2522
+ insert_stmt.excluded.agent_id,
2523
+ insert_stmt.excluded.name,
2524
+ )
2525
+ existing_level = self._get_trace_component_level_expr(
2526
+ table.c.workflow_id,
2527
+ table.c.team_id,
2528
+ table.c.agent_id,
2529
+ table.c.name,
2530
+ )
2531
+
2532
+ # Build the ON CONFLICT DO UPDATE clause
2533
+ # Use MIN for start_time, MAX for end_time to capture full trace duration
2534
+ # SQLite stores timestamps as ISO strings, so string comparison works for ISO format
2535
+ # Duration is calculated as: (MAX(end_time) - MIN(start_time)) in milliseconds
2536
+ # SQLite doesn't have epoch extraction, so we calculate duration using julianday
2537
+ upsert_stmt = insert_stmt.on_conflict_do_update(
2538
+ index_elements=["trace_id"],
2539
+ set_={
2540
+ "end_time": func.max(table.c.end_time, insert_stmt.excluded.end_time),
2541
+ "start_time": func.min(table.c.start_time, insert_stmt.excluded.start_time),
2542
+ # Calculate duration in milliseconds using julianday (SQLite-specific)
2543
+ # julianday returns days, so multiply by 86400000 to get milliseconds
2544
+ "duration_ms": (
2545
+ func.julianday(func.max(table.c.end_time, insert_stmt.excluded.end_time))
2546
+ - func.julianday(func.min(table.c.start_time, insert_stmt.excluded.start_time))
2547
+ )
2548
+ * 86400000,
2549
+ "status": insert_stmt.excluded.status,
2550
+ # Update name only if new trace is from a higher-level component
2551
+ # Priority: workflow (3) > team (2) > agent (1) > child spans (0)
2552
+ "name": case(
2553
+ (new_level > existing_level, insert_stmt.excluded.name),
2554
+ else_=table.c.name,
2555
+ ),
2556
+ # Preserve existing non-null context values using COALESCE
2557
+ "run_id": func.coalesce(insert_stmt.excluded.run_id, table.c.run_id),
2558
+ "session_id": func.coalesce(insert_stmt.excluded.session_id, table.c.session_id),
2559
+ "user_id": func.coalesce(insert_stmt.excluded.user_id, table.c.user_id),
2560
+ "agent_id": func.coalesce(insert_stmt.excluded.agent_id, table.c.agent_id),
2561
+ "team_id": func.coalesce(insert_stmt.excluded.team_id, table.c.team_id),
2562
+ "workflow_id": func.coalesce(insert_stmt.excluded.workflow_id, table.c.workflow_id),
2563
+ },
2564
+ )
2565
+ await sess.execute(upsert_stmt)
2566
+
2567
+ except Exception as e:
2568
+ log_error(f"Error creating trace: {e}")
2569
+ # Don't raise - tracing should not break the main application flow
2570
+
2571
+ async def get_trace(
2572
+ self,
2573
+ trace_id: Optional[str] = None,
2574
+ run_id: Optional[str] = None,
2575
+ ):
2576
+ """Get a single trace by trace_id or other filters.
2577
+
2578
+ Args:
2579
+ trace_id: The unique trace identifier.
2580
+ run_id: Filter by run ID (returns first match).
2581
+
2582
+ Returns:
2583
+ Optional[Trace]: The trace if found, None otherwise.
2584
+
2585
+ Note:
2586
+ If multiple filters are provided, trace_id takes precedence.
2587
+ For other filters, the most recent trace is returned.
2588
+ """
2589
+ try:
2590
+ from agno.tracing.schemas import Trace
2591
+
2592
+ table = await self._get_table(table_type="traces")
2593
+ if table is None:
2594
+ return None
2595
+
2596
+ # Get spans table for JOIN
2597
+ spans_table = await self._get_table(table_type="spans")
2598
+
2599
+ async with self.async_session_factory() as sess:
2600
+ # Build query with aggregated span counts
2601
+ stmt = self._get_traces_base_query(table, spans_table)
2602
+
2603
+ if trace_id:
2604
+ stmt = stmt.where(table.c.trace_id == trace_id)
2605
+ elif run_id:
2606
+ stmt = stmt.where(table.c.run_id == run_id)
2607
+ else:
2608
+ log_debug("get_trace called without any filter parameters")
2609
+ return None
2610
+
2611
+ # Order by most recent and get first result
2612
+ stmt = stmt.order_by(table.c.start_time.desc()).limit(1)
2613
+ result = await sess.execute(stmt)
2614
+ row = result.fetchone()
2615
+
2616
+ if row:
2617
+ return Trace.from_dict(dict(row._mapping))
2618
+ return None
2619
+
2620
+ except Exception as e:
2621
+ log_error(f"Error getting trace: {e}")
2622
+ return None
2623
+
2624
+ async def get_traces(
2625
+ self,
2626
+ run_id: Optional[str] = None,
2627
+ session_id: Optional[str] = None,
2628
+ user_id: Optional[str] = None,
2629
+ agent_id: Optional[str] = None,
2630
+ team_id: Optional[str] = None,
2631
+ workflow_id: Optional[str] = None,
2632
+ status: Optional[str] = None,
2633
+ start_time: Optional[datetime] = None,
2634
+ end_time: Optional[datetime] = None,
2635
+ limit: Optional[int] = 20,
2636
+ page: Optional[int] = 1,
2637
+ ) -> tuple[List, int]:
2638
+ """Get traces matching the provided filters with pagination.
2639
+
2640
+ Args:
2641
+ run_id: Filter by run ID.
2642
+ session_id: Filter by session ID.
2643
+ user_id: Filter by user ID.
2644
+ agent_id: Filter by agent ID.
2645
+ team_id: Filter by team ID.
2646
+ workflow_id: Filter by workflow ID.
2647
+ status: Filter by status (OK, ERROR, UNSET).
2648
+ start_time: Filter traces starting after this datetime.
2649
+ end_time: Filter traces ending before this datetime.
2650
+ limit: Maximum number of traces to return per page.
2651
+ page: Page number (1-indexed).
2652
+
2653
+ Returns:
2654
+ tuple[List[Trace], int]: Tuple of (list of matching traces, total count).
2655
+ """
2656
+ try:
2657
+ from agno.tracing.schemas import Trace
2658
+
2659
+ table = await self._get_table(table_type="traces")
2660
+ if table is None:
2661
+ log_debug("Traces table not found")
2662
+ return [], 0
2663
+
2664
+ # Get spans table for JOIN
2665
+ spans_table = await self._get_table(table_type="spans")
2666
+
2667
+ async with self.async_session_factory() as sess:
2668
+ # Build base query with aggregated span counts
2669
+ base_stmt = self._get_traces_base_query(table, spans_table)
2670
+
2671
+ # Apply filters
2672
+ if run_id:
2673
+ base_stmt = base_stmt.where(table.c.run_id == run_id)
2674
+ if session_id:
2675
+ base_stmt = base_stmt.where(table.c.session_id == session_id)
2676
+ if user_id:
2677
+ base_stmt = base_stmt.where(table.c.user_id == user_id)
2678
+ if agent_id:
2679
+ base_stmt = base_stmt.where(table.c.agent_id == agent_id)
2680
+ if team_id:
2681
+ base_stmt = base_stmt.where(table.c.team_id == team_id)
2682
+ if workflow_id:
2683
+ base_stmt = base_stmt.where(table.c.workflow_id == workflow_id)
2684
+ if status:
2685
+ base_stmt = base_stmt.where(table.c.status == status)
2686
+ if start_time:
2687
+ # Convert datetime to ISO string for comparison
2688
+ base_stmt = base_stmt.where(table.c.start_time >= start_time.isoformat())
2689
+ if end_time:
2690
+ # Convert datetime to ISO string for comparison
2691
+ base_stmt = base_stmt.where(table.c.end_time <= end_time.isoformat())
2692
+
2693
+ # Get total count
2694
+ count_stmt = select(func.count()).select_from(base_stmt.alias())
2695
+ total_count = await sess.scalar(count_stmt) or 0
2696
+
2697
+ # Apply pagination
2698
+ offset = (page - 1) * limit if page and limit else 0
2699
+ paginated_stmt = base_stmt.order_by(table.c.start_time.desc()).limit(limit).offset(offset)
2700
+
2701
+ result = await sess.execute(paginated_stmt)
2702
+ results = result.fetchall()
2703
+
2704
+ traces = [Trace.from_dict(dict(row._mapping)) for row in results]
2705
+ return traces, total_count
2706
+
2707
+ except Exception as e:
2708
+ log_error(f"Error getting traces: {e}")
2709
+ return [], 0
2710
+
2711
+ async def get_trace_stats(
2712
+ self,
2713
+ user_id: Optional[str] = None,
2714
+ agent_id: Optional[str] = None,
2715
+ team_id: Optional[str] = None,
2716
+ workflow_id: Optional[str] = None,
2717
+ start_time: Optional[datetime] = None,
2718
+ end_time: Optional[datetime] = None,
2719
+ limit: Optional[int] = 20,
2720
+ page: Optional[int] = 1,
2721
+ ) -> tuple[List[Dict[str, Any]], int]:
2722
+ """Get trace statistics grouped by session.
2723
+
2724
+ Args:
2725
+ user_id: Filter by user ID.
2726
+ agent_id: Filter by agent ID.
2727
+ team_id: Filter by team ID.
2728
+ workflow_id: Filter by workflow ID.
2729
+ start_time: Filter sessions with traces created after this datetime.
2730
+ end_time: Filter sessions with traces created before this datetime.
2731
+ limit: Maximum number of sessions to return per page.
2732
+ page: Page number (1-indexed).
2733
+
2734
+ Returns:
2735
+ tuple[List[Dict], int]: Tuple of (list of session stats dicts, total count).
2736
+ Each dict contains: session_id, user_id, agent_id, team_id, total_traces,
2737
+ workflow_id, first_trace_at, last_trace_at.
2738
+ """
2739
+ try:
2740
+ table = await self._get_table(table_type="traces")
2741
+ if table is None:
2742
+ log_debug("Traces table not found")
2743
+ return [], 0
2744
+
2745
+ async with self.async_session_factory() as sess:
2746
+ # Build base query grouped by session_id
2747
+ base_stmt = (
2748
+ select(
2749
+ table.c.session_id,
2750
+ table.c.user_id,
2751
+ table.c.agent_id,
2752
+ table.c.team_id,
2753
+ table.c.workflow_id,
2754
+ func.count(table.c.trace_id).label("total_traces"),
2755
+ func.min(table.c.created_at).label("first_trace_at"),
2756
+ func.max(table.c.created_at).label("last_trace_at"),
2757
+ )
2758
+ .where(table.c.session_id.isnot(None)) # Only sessions with session_id
2759
+ .group_by(
2760
+ table.c.session_id, table.c.user_id, table.c.agent_id, table.c.team_id, table.c.workflow_id
2761
+ )
2762
+ )
2763
+
2764
+ # Apply filters
2765
+ if user_id:
2766
+ base_stmt = base_stmt.where(table.c.user_id == user_id)
2767
+ if workflow_id:
2768
+ base_stmt = base_stmt.where(table.c.workflow_id == workflow_id)
2769
+ if team_id:
2770
+ base_stmt = base_stmt.where(table.c.team_id == team_id)
2771
+ if agent_id:
2772
+ base_stmt = base_stmt.where(table.c.agent_id == agent_id)
2773
+ if start_time:
2774
+ # Convert datetime to ISO string for comparison
2775
+ base_stmt = base_stmt.where(table.c.created_at >= start_time.isoformat())
2776
+ if end_time:
2777
+ # Convert datetime to ISO string for comparison
2778
+ base_stmt = base_stmt.where(table.c.created_at <= end_time.isoformat())
2779
+
2780
+ # Get total count of sessions
2781
+ count_stmt = select(func.count()).select_from(base_stmt.alias())
2782
+ total_count = await sess.scalar(count_stmt) or 0
2783
+
2784
+ # Apply pagination and ordering
2785
+ offset = (page - 1) * limit if page and limit else 0
2786
+ paginated_stmt = base_stmt.order_by(func.max(table.c.created_at).desc()).limit(limit).offset(offset)
2787
+
2788
+ result = await sess.execute(paginated_stmt)
2789
+ results = result.fetchall()
2790
+
2791
+ # Convert to list of dicts with datetime objects
2792
+ stats_list = []
2793
+ for row in results:
2794
+ # Convert ISO strings to datetime objects
2795
+ first_trace_at_str = row.first_trace_at
2796
+ last_trace_at_str = row.last_trace_at
2797
+
2798
+ # Parse ISO format strings to datetime objects
2799
+ first_trace_at = datetime.fromisoformat(first_trace_at_str.replace("Z", "+00:00"))
2800
+ last_trace_at = datetime.fromisoformat(last_trace_at_str.replace("Z", "+00:00"))
2801
+
2802
+ stats_list.append(
2803
+ {
2804
+ "session_id": row.session_id,
2805
+ "user_id": row.user_id,
2806
+ "agent_id": row.agent_id,
2807
+ "team_id": row.team_id,
2808
+ "workflow_id": row.workflow_id,
2809
+ "total_traces": row.total_traces,
2810
+ "first_trace_at": first_trace_at,
2811
+ "last_trace_at": last_trace_at,
2812
+ }
2813
+ )
2814
+
2815
+ return stats_list, total_count
2816
+
2817
+ except Exception as e:
2818
+ log_error(f"Error getting trace stats: {e}")
2819
+ return [], 0
2820
+
+    # --- Spans ---
+    async def create_span(self, span: "Span") -> None:
+        """Create a single span in the database.
+
+        Args:
+            span: The Span object to store.
+        """
+        try:
+            table = await self._get_table(table_type="spans", create_table_if_not_found=True)
+            if table is None:
+                return
+
+            async with self.async_session_factory() as sess, sess.begin():
+                stmt = sqlite.insert(table).values(span.to_dict())
+                await sess.execute(stmt)
+
+        except Exception as e:
+            log_error(f"Error creating span: {e}")
+
+    async def create_spans(self, spans: List) -> None:
+        """Create multiple spans in the database as a batch.
+
+        Args:
+            spans: List of Span objects to store.
+        """
+        if not spans:
+            return
+
+        try:
+            table = await self._get_table(table_type="spans", create_table_if_not_found=True)
+            if table is None:
+                return
+
+            async with self.async_session_factory() as sess, sess.begin():
+                for span in spans:
+                    stmt = sqlite.insert(table).values(span.to_dict())
+                    await sess.execute(stmt)
+
+        except Exception as e:
+            log_error(f"Error creating spans batch: {e}")
+
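create_spans above issues one INSERT per span inside a single transaction. As a design note, SQLAlchemy's executemany path (passing a list of parameter dicts to execute()) could express the same write as a single statement; the fragment below is a hedged sketch of that variant, reusing the enclosing method's `table`, `spans`, and session factory, and is not the code shipped in this diff.

```python
# Hedged alternative to the per-span loop above (fragment, not shipped code):
# one INSERT executed with a list of parameter sets via SQLAlchemy executemany.
async with self.async_session_factory() as sess, sess.begin():
    await sess.execute(
        sqlite.insert(table),
        [span.to_dict() for span in spans],  # one parameter dict per span
    )
```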
+    async def get_span(self, span_id: str):
+        """Get a single span by its span_id.
+
+        Args:
+            span_id: The unique span identifier.
+
+        Returns:
+            Optional[Span]: The span if found, None otherwise.
+        """
+        try:
+            from agno.tracing.schemas import Span
+
+            table = await self._get_table(table_type="spans")
+            if table is None:
+                return None
+
+            async with self.async_session_factory() as sess:
+                stmt = select(table).where(table.c.span_id == span_id)
+                result = await sess.execute(stmt)
+                row = result.fetchone()
+                if row:
+                    return Span.from_dict(dict(row._mapping))
+                return None
+
+        except Exception as e:
+            log_error(f"Error getting span: {e}")
+            return None
+
+    async def get_spans(
+        self,
+        trace_id: Optional[str] = None,
+        parent_span_id: Optional[str] = None,
+        limit: Optional[int] = 1000,
+    ) -> List:
+        """Get spans matching the provided filters.
+
+        Args:
+            trace_id: Filter by trace ID.
+            parent_span_id: Filter by parent span ID.
+            limit: Maximum number of spans to return.
+
+        Returns:
+            List[Span]: List of matching spans.
+        """
+        try:
+            from agno.tracing.schemas import Span
+
+            table = await self._get_table(table_type="spans")
+            if table is None:
+                return []
+
+            async with self.async_session_factory() as sess:
+                stmt = select(table)
+
+                # Apply filters
+                if trace_id:
+                    stmt = stmt.where(table.c.trace_id == trace_id)
+                if parent_span_id:
+                    stmt = stmt.where(table.c.parent_span_id == parent_span_id)
+
+                if limit:
+                    stmt = stmt.limit(limit)
+
+                result = await sess.execute(stmt)
+                results = result.fetchall()
+                return [Span.from_dict(dict(row._mapping)) for row in results]
+
+        except Exception as e:
+            log_error(f"Error getting spans: {e}")
+            return []
+
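A hedged read-back sketch for the span queries above (not part of the diff), meant to run inside an async function with a db instance like the one constructed in the earlier example; the trace and span ids are illustrative.

```python
# Fetch the spans recorded for one trace, then a single span by id.
spans = await db.get_spans(trace_id="trace_123", limit=100)
print(f"{len(spans)} spans found for trace_123")

span = await db.get_span(span_id="span_abc")  # returns a Span or None
```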
+    # -- Learning methods --
+    async def get_learning(
+        self,
+        learning_type: str,
+        user_id: Optional[str] = None,
+        agent_id: Optional[str] = None,
+        team_id: Optional[str] = None,
+        workflow_id: Optional[str] = None,
+        session_id: Optional[str] = None,
+        namespace: Optional[str] = None,
+        entity_id: Optional[str] = None,
+        entity_type: Optional[str] = None,
+    ) -> Optional[Dict[str, Any]]:
+        """Retrieve a learning record.
+
+        Args:
+            learning_type: Type of learning ('user_profile', 'session_context', etc.)
+            user_id: Filter by user ID.
+            agent_id: Filter by agent ID.
+            team_id: Filter by team ID.
+            workflow_id: Filter by workflow ID.
+            session_id: Filter by session ID.
+            namespace: Filter by namespace ('user', 'global', or custom).
+            entity_id: Filter by entity ID (for entity-specific learnings).
+            entity_type: Filter by entity type ('person', 'company', etc.).
+
+        Returns:
+            Dict with 'content' key containing the learning data, or None.
+        """
+        try:
+            table = await self._get_table(table_type="learnings")
+            if table is None:
+                return None
+
+            async with self.async_session_factory() as sess:
+                stmt = select(table).where(table.c.learning_type == learning_type)
+
+                if user_id is not None:
+                    stmt = stmt.where(table.c.user_id == user_id)
+                if agent_id is not None:
+                    stmt = stmt.where(table.c.agent_id == agent_id)
+                if team_id is not None:
+                    stmt = stmt.where(table.c.team_id == team_id)
+                if workflow_id is not None:
+                    stmt = stmt.where(table.c.workflow_id == workflow_id)
+                if session_id is not None:
+                    stmt = stmt.where(table.c.session_id == session_id)
+                if namespace is not None:
+                    stmt = stmt.where(table.c.namespace == namespace)
+                if entity_id is not None:
+                    stmt = stmt.where(table.c.entity_id == entity_id)
+                if entity_type is not None:
+                    stmt = stmt.where(table.c.entity_type == entity_type)
+
+                result = await sess.execute(stmt)
+                row = result.fetchone()
+                if row is None:
+                    return None
+
+                row_dict = dict(row._mapping)
+                return {"content": row_dict.get("content")}
+
+        except Exception as e:
+            log_debug(f"Error retrieving learning: {e}")
+            return None
+
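A hedged sketch of reading a single learning record with the method above (not part of the diff); the learning_type and namespace values are illustrative, and the call assumes the same db instance as the earlier examples, inside an async function.

```python
# get_learning returns {"content": ...} for the first matching row, or None.
profile = await db.get_learning(
    learning_type="user_profile",
    user_id="user_1",
    namespace="user",
)
if profile is not None:
    print(profile["content"])
```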
+    async def upsert_learning(
+        self,
+        id: str,
+        learning_type: str,
+        content: Dict[str, Any],
+        user_id: Optional[str] = None,
+        agent_id: Optional[str] = None,
+        team_id: Optional[str] = None,
+        workflow_id: Optional[str] = None,
+        session_id: Optional[str] = None,
+        namespace: Optional[str] = None,
+        entity_id: Optional[str] = None,
+        entity_type: Optional[str] = None,
+        metadata: Optional[Dict[str, Any]] = None,
+    ) -> None:
+        """Insert or update a learning record.
+
+        Args:
+            id: Unique identifier for the learning.
+            learning_type: Type of learning ('user_profile', 'session_context', etc.)
+            content: The learning content as a dict.
+            user_id: Associated user ID.
+            agent_id: Associated agent ID.
+            team_id: Associated team ID.
+            workflow_id: Associated workflow ID.
+            session_id: Associated session ID.
+            namespace: Namespace for scoping ('user', 'global', or custom).
+            entity_id: Associated entity ID (for entity-specific learnings).
+            entity_type: Entity type ('person', 'company', etc.).
+            metadata: Optional metadata.
+        """
+        try:
+            table = await self._get_table(table_type="learnings", create_table_if_not_found=True)
+            if table is None:
+                return
+
+            current_time = int(time.time())
+
+            async with self.async_session_factory() as sess, sess.begin():
+                stmt = sqlite.insert(table).values(
+                    learning_id=id,
+                    learning_type=learning_type,
+                    namespace=namespace,
+                    user_id=user_id,
+                    agent_id=agent_id,
+                    team_id=team_id,
+                    workflow_id=workflow_id,
+                    session_id=session_id,
+                    entity_id=entity_id,
+                    entity_type=entity_type,
+                    content=content,
+                    metadata=metadata,
+                    created_at=current_time,
+                    updated_at=current_time,
+                )
+                stmt = stmt.on_conflict_do_update(
+                    index_elements=["learning_id"],
+                    set_=dict(
+                        content=content,
+                        metadata=metadata,
+                        updated_at=current_time,
+                    ),
+                )
+                await sess.execute(stmt)
+
+            log_debug(f"Upserted learning: {id}")
+
+        except Exception as e:
+            log_debug(f"Error upserting learning: {e}")
+
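A hedged sketch of the upsert semantics above (not part of the diff): a second call with the same id takes the ON CONFLICT (learning_id) path and updates content, metadata, and updated_at in place. Values are illustrative; the call assumes the same db instance, inside an async function.

```python
await db.upsert_learning(
    id="learning_1",
    learning_type="user_profile",
    content={"preferred_language": "en"},
    user_id="user_1",
    namespace="user",
)
# Same id again: resolved via ON CONFLICT, so the row is updated, not duplicated.
await db.upsert_learning(
    id="learning_1",
    learning_type="user_profile",
    content={"preferred_language": "fr"},
    user_id="user_1",
    namespace="user",
)
```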
+    async def delete_learning(self, id: str) -> bool:
+        """Delete a learning record.
+
+        Args:
+            id: The learning ID to delete.
+
+        Returns:
+            True if deleted, False otherwise.
+        """
+        try:
+            table = await self._get_table(table_type="learnings")
+            if table is None:
+                return False
+
+            async with self.async_session_factory() as sess, sess.begin():
+                stmt = table.delete().where(table.c.learning_id == id)
+                result = await sess.execute(stmt)
+                return getattr(result, "rowcount", 0) > 0
+
+        except Exception as e:
+            log_debug(f"Error deleting learning: {e}")
+            return False
+
+    async def get_learnings(
+        self,
+        learning_type: Optional[str] = None,
+        user_id: Optional[str] = None,
+        agent_id: Optional[str] = None,
+        team_id: Optional[str] = None,
+        workflow_id: Optional[str] = None,
+        session_id: Optional[str] = None,
+        namespace: Optional[str] = None,
+        entity_id: Optional[str] = None,
+        entity_type: Optional[str] = None,
+        limit: Optional[int] = None,
+    ) -> List[Dict[str, Any]]:
+        """Get multiple learning records.
+
+        Args:
+            learning_type: Filter by learning type.
+            user_id: Filter by user ID.
+            agent_id: Filter by agent ID.
+            team_id: Filter by team ID.
+            workflow_id: Filter by workflow ID.
+            session_id: Filter by session ID.
+            namespace: Filter by namespace ('user', 'global', or custom).
+            entity_id: Filter by entity ID (for entity-specific learnings).
+            entity_type: Filter by entity type ('person', 'company', etc.).
+            limit: Maximum number of records to return.
+
+        Returns:
+            List of learning records.
+        """
+        try:
+            table = await self._get_table(table_type="learnings")
+            if table is None:
+                return []
+
+            async with self.async_session_factory() as sess:
+                stmt = select(table)
+
+                if learning_type is not None:
+                    stmt = stmt.where(table.c.learning_type == learning_type)
+                if user_id is not None:
+                    stmt = stmt.where(table.c.user_id == user_id)
+                if agent_id is not None:
+                    stmt = stmt.where(table.c.agent_id == agent_id)
+                if team_id is not None:
+                    stmt = stmt.where(table.c.team_id == team_id)
+                if workflow_id is not None:
+                    stmt = stmt.where(table.c.workflow_id == workflow_id)
+                if session_id is not None:
+                    stmt = stmt.where(table.c.session_id == session_id)
+                if namespace is not None:
+                    stmt = stmt.where(table.c.namespace == namespace)
+                if entity_id is not None:
+                    stmt = stmt.where(table.c.entity_id == entity_id)
+                if entity_type is not None:
+                    stmt = stmt.where(table.c.entity_type == entity_type)
+
+                stmt = stmt.order_by(table.c.updated_at.desc())
+
+                if limit is not None:
+                    stmt = stmt.limit(limit)
+
+                result = await sess.execute(stmt)
+                results = result.fetchall()
+                return [dict(row._mapping) for row in results]
+
+        except Exception as e:
+            log_debug(f"Error getting learnings: {e}")
+            return []
+
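A hedged listing-and-cleanup sketch for the methods above (not part of the diff): get_learnings returns plain row dicts ordered by updated_at descending, and delete_learning reports whether a row was removed. Assumes the same db instance, inside an async function.

```python
learnings = await db.get_learnings(namespace="user", user_id="user_1", limit=10)
for record in learnings:
    # Each record is a dict of the learnings table row (column-name keys).
    print(record["learning_id"], record["learning_type"])

deleted = await db.delete_learning(id="learning_1")
print("deleted:", deleted)
```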
+    # --- Components (Not yet supported for async) ---
+    def get_component(
+        self,
+        component_id: str,
+        component_type: Optional[ComponentType] = None,
+    ) -> Optional[Dict[str, Any]]:
+        raise NotImplementedError("Component methods not yet supported for async databases")
+
+    def upsert_component(
+        self,
+        component_id: str,
+        component_type: Optional[ComponentType] = None,
+        name: Optional[str] = None,
+        description: Optional[str] = None,
+        metadata: Optional[Dict[str, Any]] = None,
+    ) -> Dict[str, Any]:
+        raise NotImplementedError("Component methods not yet supported for async databases")
+
+    def delete_component(
+        self,
+        component_id: str,
+        hard_delete: bool = False,
+    ) -> bool:
+        raise NotImplementedError("Component methods not yet supported for async databases")
+
+    def list_components(
+        self,
+        component_type: Optional[ComponentType] = None,
+        include_deleted: bool = False,
+        limit: int = 20,
+        offset: int = 0,
+    ) -> Tuple[List[Dict[str, Any]], int]:
+        raise NotImplementedError("Component methods not yet supported for async databases")
+
+    def create_component_with_config(
+        self,
+        component_id: str,
+        component_type: ComponentType,
+        name: Optional[str],
+        config: Dict[str, Any],
+        description: Optional[str] = None,
+        metadata: Optional[Dict[str, Any]] = None,
+        label: Optional[str] = None,
+        stage: str = "draft",
+        notes: Optional[str] = None,
+        links: Optional[List[Dict[str, Any]]] = None,
+    ) -> Tuple[Dict[str, Any], Dict[str, Any]]:
+        raise NotImplementedError("Component methods not yet supported for async databases")
+
+    def get_config(
+        self,
+        component_id: str,
+        version: Optional[int] = None,
+        label: Optional[str] = None,
+    ) -> Optional[Dict[str, Any]]:
+        raise NotImplementedError("Component methods not yet supported for async databases")
+
+    def upsert_config(
+        self,
+        component_id: str,
+        config: Optional[Dict[str, Any]] = None,
+        version: Optional[int] = None,
+        label: Optional[str] = None,
+        stage: Optional[str] = None,
+        notes: Optional[str] = None,
+        links: Optional[List[Dict[str, Any]]] = None,
+    ) -> Dict[str, Any]:
+        raise NotImplementedError("Component methods not yet supported for async databases")
+
+    def delete_config(
+        self,
+        component_id: str,
+        version: int,
+    ) -> bool:
+        raise NotImplementedError("Component methods not yet supported for async databases")
+
+    def list_configs(
+        self,
+        component_id: str,
+        include_config: bool = False,
+    ) -> List[Dict[str, Any]]:
+        raise NotImplementedError("Component methods not yet supported for async databases")
+
+    def set_current_version(
+        self,
+        component_id: str,
+        version: int,
+    ) -> bool:
+        raise NotImplementedError("Component methods not yet supported for async databases")
+
+    def get_links(
+        self,
+        component_id: str,
+        version: int,
+        link_kind: Optional[str] = None,
+    ) -> List[Dict[str, Any]]:
+        raise NotImplementedError("Component methods not yet supported for async databases")
+
+    def get_dependents(
+        self,
+        component_id: str,
+        version: Optional[int] = None,
+    ) -> List[Dict[str, Any]]:
+        raise NotImplementedError("Component methods not yet supported for async databases")
+
+    def load_component_graph(
+        self,
+        component_id: str,
+        version: Optional[int] = None,
+        label: Optional[str] = None,
+    ) -> Optional[Dict[str, Any]]:
+        raise NotImplementedError("Component methods not yet supported for async databases")