agno-2.0.1-py3-none-any.whl → agno-2.3.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (314)
  1. agno/agent/agent.py +6015 -2823
  2. agno/api/api.py +2 -0
  3. agno/api/os.py +1 -1
  4. agno/culture/__init__.py +3 -0
  5. agno/culture/manager.py +956 -0
  6. agno/db/async_postgres/__init__.py +3 -0
  7. agno/db/base.py +385 -6
  8. agno/db/dynamo/dynamo.py +388 -81
  9. agno/db/dynamo/schemas.py +47 -10
  10. agno/db/dynamo/utils.py +63 -4
  11. agno/db/firestore/firestore.py +435 -64
  12. agno/db/firestore/schemas.py +11 -0
  13. agno/db/firestore/utils.py +102 -4
  14. agno/db/gcs_json/gcs_json_db.py +384 -42
  15. agno/db/gcs_json/utils.py +60 -26
  16. agno/db/in_memory/in_memory_db.py +351 -66
  17. agno/db/in_memory/utils.py +60 -2
  18. agno/db/json/json_db.py +339 -48
  19. agno/db/json/utils.py +60 -26
  20. agno/db/migrations/manager.py +199 -0
  21. agno/db/migrations/v1_to_v2.py +510 -37
  22. agno/db/migrations/versions/__init__.py +0 -0
  23. agno/db/migrations/versions/v2_3_0.py +938 -0
  24. agno/db/mongo/__init__.py +15 -1
  25. agno/db/mongo/async_mongo.py +2036 -0
  26. agno/db/mongo/mongo.py +653 -76
  27. agno/db/mongo/schemas.py +13 -0
  28. agno/db/mongo/utils.py +80 -8
  29. agno/db/mysql/mysql.py +687 -25
  30. agno/db/mysql/schemas.py +61 -37
  31. agno/db/mysql/utils.py +60 -2
  32. agno/db/postgres/__init__.py +2 -1
  33. agno/db/postgres/async_postgres.py +2001 -0
  34. agno/db/postgres/postgres.py +676 -57
  35. agno/db/postgres/schemas.py +43 -18
  36. agno/db/postgres/utils.py +164 -2
  37. agno/db/redis/redis.py +344 -38
  38. agno/db/redis/schemas.py +18 -0
  39. agno/db/redis/utils.py +60 -2
  40. agno/db/schemas/__init__.py +2 -1
  41. agno/db/schemas/culture.py +120 -0
  42. agno/db/schemas/memory.py +13 -0
  43. agno/db/singlestore/schemas.py +26 -1
  44. agno/db/singlestore/singlestore.py +687 -53
  45. agno/db/singlestore/utils.py +60 -2
  46. agno/db/sqlite/__init__.py +2 -1
  47. agno/db/sqlite/async_sqlite.py +2371 -0
  48. agno/db/sqlite/schemas.py +24 -0
  49. agno/db/sqlite/sqlite.py +774 -85
  50. agno/db/sqlite/utils.py +168 -5
  51. agno/db/surrealdb/__init__.py +3 -0
  52. agno/db/surrealdb/metrics.py +292 -0
  53. agno/db/surrealdb/models.py +309 -0
  54. agno/db/surrealdb/queries.py +71 -0
  55. agno/db/surrealdb/surrealdb.py +1361 -0
  56. agno/db/surrealdb/utils.py +147 -0
  57. agno/db/utils.py +50 -22
  58. agno/eval/accuracy.py +50 -43
  59. agno/eval/performance.py +6 -3
  60. agno/eval/reliability.py +6 -3
  61. agno/eval/utils.py +33 -16
  62. agno/exceptions.py +68 -1
  63. agno/filters.py +354 -0
  64. agno/guardrails/__init__.py +6 -0
  65. agno/guardrails/base.py +19 -0
  66. agno/guardrails/openai.py +144 -0
  67. agno/guardrails/pii.py +94 -0
  68. agno/guardrails/prompt_injection.py +52 -0
  69. agno/integrations/discord/client.py +1 -0
  70. agno/knowledge/chunking/agentic.py +13 -10
  71. agno/knowledge/chunking/fixed.py +1 -1
  72. agno/knowledge/chunking/semantic.py +40 -8
  73. agno/knowledge/chunking/strategy.py +59 -15
  74. agno/knowledge/embedder/aws_bedrock.py +9 -4
  75. agno/knowledge/embedder/azure_openai.py +54 -0
  76. agno/knowledge/embedder/base.py +2 -0
  77. agno/knowledge/embedder/cohere.py +184 -5
  78. agno/knowledge/embedder/fastembed.py +1 -1
  79. agno/knowledge/embedder/google.py +79 -1
  80. agno/knowledge/embedder/huggingface.py +9 -4
  81. agno/knowledge/embedder/jina.py +63 -0
  82. agno/knowledge/embedder/mistral.py +78 -11
  83. agno/knowledge/embedder/nebius.py +1 -1
  84. agno/knowledge/embedder/ollama.py +13 -0
  85. agno/knowledge/embedder/openai.py +37 -65
  86. agno/knowledge/embedder/sentence_transformer.py +8 -4
  87. agno/knowledge/embedder/vllm.py +262 -0
  88. agno/knowledge/embedder/voyageai.py +69 -16
  89. agno/knowledge/knowledge.py +594 -186
  90. agno/knowledge/reader/base.py +9 -2
  91. agno/knowledge/reader/csv_reader.py +8 -10
  92. agno/knowledge/reader/docx_reader.py +5 -6
  93. agno/knowledge/reader/field_labeled_csv_reader.py +290 -0
  94. agno/knowledge/reader/json_reader.py +6 -5
  95. agno/knowledge/reader/markdown_reader.py +13 -13
  96. agno/knowledge/reader/pdf_reader.py +43 -68
  97. agno/knowledge/reader/pptx_reader.py +101 -0
  98. agno/knowledge/reader/reader_factory.py +51 -6
  99. agno/knowledge/reader/s3_reader.py +3 -15
  100. agno/knowledge/reader/tavily_reader.py +194 -0
  101. agno/knowledge/reader/text_reader.py +13 -13
  102. agno/knowledge/reader/web_search_reader.py +2 -43
  103. agno/knowledge/reader/website_reader.py +43 -25
  104. agno/knowledge/reranker/__init__.py +2 -8
  105. agno/knowledge/types.py +9 -0
  106. agno/knowledge/utils.py +20 -0
  107. agno/media.py +72 -0
  108. agno/memory/manager.py +336 -82
  109. agno/models/aimlapi/aimlapi.py +2 -2
  110. agno/models/anthropic/claude.py +183 -37
  111. agno/models/aws/bedrock.py +52 -112
  112. agno/models/aws/claude.py +33 -1
  113. agno/models/azure/ai_foundry.py +33 -15
  114. agno/models/azure/openai_chat.py +25 -8
  115. agno/models/base.py +999 -519
  116. agno/models/cerebras/cerebras.py +19 -13
  117. agno/models/cerebras/cerebras_openai.py +8 -5
  118. agno/models/cohere/chat.py +27 -1
  119. agno/models/cometapi/__init__.py +5 -0
  120. agno/models/cometapi/cometapi.py +57 -0
  121. agno/models/dashscope/dashscope.py +1 -0
  122. agno/models/deepinfra/deepinfra.py +2 -2
  123. agno/models/deepseek/deepseek.py +2 -2
  124. agno/models/fireworks/fireworks.py +2 -2
  125. agno/models/google/gemini.py +103 -31
  126. agno/models/groq/groq.py +28 -11
  127. agno/models/huggingface/huggingface.py +2 -1
  128. agno/models/internlm/internlm.py +2 -2
  129. agno/models/langdb/langdb.py +4 -4
  130. agno/models/litellm/chat.py +18 -1
  131. agno/models/litellm/litellm_openai.py +2 -2
  132. agno/models/llama_cpp/__init__.py +5 -0
  133. agno/models/llama_cpp/llama_cpp.py +22 -0
  134. agno/models/message.py +139 -0
  135. agno/models/meta/llama.py +27 -10
  136. agno/models/meta/llama_openai.py +5 -17
  137. agno/models/nebius/nebius.py +6 -6
  138. agno/models/nexus/__init__.py +3 -0
  139. agno/models/nexus/nexus.py +22 -0
  140. agno/models/nvidia/nvidia.py +2 -2
  141. agno/models/ollama/chat.py +59 -5
  142. agno/models/openai/chat.py +69 -29
  143. agno/models/openai/responses.py +103 -106
  144. agno/models/openrouter/openrouter.py +41 -3
  145. agno/models/perplexity/perplexity.py +4 -5
  146. agno/models/portkey/portkey.py +3 -3
  147. agno/models/requesty/__init__.py +5 -0
  148. agno/models/requesty/requesty.py +52 -0
  149. agno/models/response.py +77 -1
  150. agno/models/sambanova/sambanova.py +2 -2
  151. agno/models/siliconflow/__init__.py +5 -0
  152. agno/models/siliconflow/siliconflow.py +25 -0
  153. agno/models/together/together.py +2 -2
  154. agno/models/utils.py +254 -8
  155. agno/models/vercel/v0.py +2 -2
  156. agno/models/vertexai/__init__.py +0 -0
  157. agno/models/vertexai/claude.py +96 -0
  158. agno/models/vllm/vllm.py +1 -0
  159. agno/models/xai/xai.py +3 -2
  160. agno/os/app.py +543 -178
  161. agno/os/auth.py +24 -14
  162. agno/os/config.py +1 -0
  163. agno/os/interfaces/__init__.py +1 -0
  164. agno/os/interfaces/a2a/__init__.py +3 -0
  165. agno/os/interfaces/a2a/a2a.py +42 -0
  166. agno/os/interfaces/a2a/router.py +250 -0
  167. agno/os/interfaces/a2a/utils.py +924 -0
  168. agno/os/interfaces/agui/agui.py +23 -7
  169. agno/os/interfaces/agui/router.py +27 -3
  170. agno/os/interfaces/agui/utils.py +242 -142
  171. agno/os/interfaces/base.py +6 -2
  172. agno/os/interfaces/slack/router.py +81 -23
  173. agno/os/interfaces/slack/slack.py +29 -14
  174. agno/os/interfaces/whatsapp/router.py +11 -4
  175. agno/os/interfaces/whatsapp/whatsapp.py +14 -7
  176. agno/os/mcp.py +111 -54
  177. agno/os/middleware/__init__.py +7 -0
  178. agno/os/middleware/jwt.py +233 -0
  179. agno/os/router.py +556 -139
  180. agno/os/routers/evals/evals.py +71 -34
  181. agno/os/routers/evals/schemas.py +31 -31
  182. agno/os/routers/evals/utils.py +6 -5
  183. agno/os/routers/health.py +31 -0
  184. agno/os/routers/home.py +52 -0
  185. agno/os/routers/knowledge/knowledge.py +185 -38
  186. agno/os/routers/knowledge/schemas.py +82 -22
  187. agno/os/routers/memory/memory.py +158 -53
  188. agno/os/routers/memory/schemas.py +20 -16
  189. agno/os/routers/metrics/metrics.py +20 -8
  190. agno/os/routers/metrics/schemas.py +16 -16
  191. agno/os/routers/session/session.py +499 -38
  192. agno/os/schema.py +308 -198
  193. agno/os/utils.py +401 -41
  194. agno/reasoning/anthropic.py +80 -0
  195. agno/reasoning/azure_ai_foundry.py +2 -2
  196. agno/reasoning/deepseek.py +2 -2
  197. agno/reasoning/default.py +3 -1
  198. agno/reasoning/gemini.py +73 -0
  199. agno/reasoning/groq.py +2 -2
  200. agno/reasoning/ollama.py +2 -2
  201. agno/reasoning/openai.py +7 -2
  202. agno/reasoning/vertexai.py +76 -0
  203. agno/run/__init__.py +6 -0
  204. agno/run/agent.py +248 -94
  205. agno/run/base.py +44 -5
  206. agno/run/team.py +238 -97
  207. agno/run/workflow.py +144 -33
  208. agno/session/agent.py +105 -89
  209. agno/session/summary.py +65 -25
  210. agno/session/team.py +176 -96
  211. agno/session/workflow.py +406 -40
  212. agno/team/team.py +3854 -1610
  213. agno/tools/dalle.py +2 -4
  214. agno/tools/decorator.py +4 -2
  215. agno/tools/duckduckgo.py +15 -11
  216. agno/tools/e2b.py +14 -7
  217. agno/tools/eleven_labs.py +23 -25
  218. agno/tools/exa.py +21 -16
  219. agno/tools/file.py +153 -23
  220. agno/tools/file_generation.py +350 -0
  221. agno/tools/firecrawl.py +4 -4
  222. agno/tools/function.py +250 -30
  223. agno/tools/gmail.py +238 -14
  224. agno/tools/google_drive.py +270 -0
  225. agno/tools/googlecalendar.py +36 -8
  226. agno/tools/googlesheets.py +20 -5
  227. agno/tools/jira.py +20 -0
  228. agno/tools/knowledge.py +3 -3
  229. agno/tools/mcp/__init__.py +10 -0
  230. agno/tools/mcp/mcp.py +331 -0
  231. agno/tools/mcp/multi_mcp.py +347 -0
  232. agno/tools/mcp/params.py +24 -0
  233. agno/tools/mcp_toolbox.py +284 -0
  234. agno/tools/mem0.py +11 -17
  235. agno/tools/memori.py +1 -53
  236. agno/tools/memory.py +419 -0
  237. agno/tools/models/nebius.py +5 -5
  238. agno/tools/models_labs.py +20 -10
  239. agno/tools/notion.py +204 -0
  240. agno/tools/parallel.py +314 -0
  241. agno/tools/scrapegraph.py +58 -31
  242. agno/tools/searxng.py +2 -2
  243. agno/tools/serper.py +2 -2
  244. agno/tools/slack.py +18 -3
  245. agno/tools/spider.py +2 -2
  246. agno/tools/tavily.py +146 -0
  247. agno/tools/whatsapp.py +1 -1
  248. agno/tools/workflow.py +278 -0
  249. agno/tools/yfinance.py +12 -11
  250. agno/utils/agent.py +820 -0
  251. agno/utils/audio.py +27 -0
  252. agno/utils/common.py +90 -1
  253. agno/utils/events.py +217 -2
  254. agno/utils/gemini.py +180 -22
  255. agno/utils/hooks.py +57 -0
  256. agno/utils/http.py +111 -0
  257. agno/utils/knowledge.py +12 -5
  258. agno/utils/log.py +1 -0
  259. agno/utils/mcp.py +92 -2
  260. agno/utils/media.py +188 -10
  261. agno/utils/merge_dict.py +22 -1
  262. agno/utils/message.py +60 -0
  263. agno/utils/models/claude.py +40 -11
  264. agno/utils/print_response/agent.py +105 -21
  265. agno/utils/print_response/team.py +103 -38
  266. agno/utils/print_response/workflow.py +251 -34
  267. agno/utils/reasoning.py +22 -1
  268. agno/utils/serialize.py +32 -0
  269. agno/utils/streamlit.py +16 -10
  270. agno/utils/string.py +41 -0
  271. agno/utils/team.py +98 -9
  272. agno/utils/tools.py +1 -1
  273. agno/vectordb/base.py +23 -4
  274. agno/vectordb/cassandra/cassandra.py +65 -9
  275. agno/vectordb/chroma/chromadb.py +182 -38
  276. agno/vectordb/clickhouse/clickhousedb.py +64 -11
  277. agno/vectordb/couchbase/couchbase.py +105 -10
  278. agno/vectordb/lancedb/lance_db.py +124 -133
  279. agno/vectordb/langchaindb/langchaindb.py +25 -7
  280. agno/vectordb/lightrag/lightrag.py +17 -3
  281. agno/vectordb/llamaindex/__init__.py +3 -0
  282. agno/vectordb/llamaindex/llamaindexdb.py +46 -7
  283. agno/vectordb/milvus/milvus.py +126 -9
  284. agno/vectordb/mongodb/__init__.py +7 -1
  285. agno/vectordb/mongodb/mongodb.py +112 -7
  286. agno/vectordb/pgvector/pgvector.py +142 -21
  287. agno/vectordb/pineconedb/pineconedb.py +80 -8
  288. agno/vectordb/qdrant/qdrant.py +125 -39
  289. agno/vectordb/redis/__init__.py +9 -0
  290. agno/vectordb/redis/redisdb.py +694 -0
  291. agno/vectordb/singlestore/singlestore.py +111 -25
  292. agno/vectordb/surrealdb/surrealdb.py +31 -5
  293. agno/vectordb/upstashdb/upstashdb.py +76 -8
  294. agno/vectordb/weaviate/weaviate.py +86 -15
  295. agno/workflow/__init__.py +2 -0
  296. agno/workflow/agent.py +299 -0
  297. agno/workflow/condition.py +112 -18
  298. agno/workflow/loop.py +69 -10
  299. agno/workflow/parallel.py +266 -118
  300. agno/workflow/router.py +110 -17
  301. agno/workflow/step.py +638 -129
  302. agno/workflow/steps.py +65 -6
  303. agno/workflow/types.py +61 -23
  304. agno/workflow/workflow.py +2085 -272
  305. {agno-2.0.1.dist-info → agno-2.3.0.dist-info}/METADATA +182 -58
  306. agno-2.3.0.dist-info/RECORD +577 -0
  307. agno/knowledge/reader/url_reader.py +0 -128
  308. agno/tools/googlesearch.py +0 -98
  309. agno/tools/mcp.py +0 -610
  310. agno/utils/models/aws_claude.py +0 -170
  311. agno-2.0.1.dist-info/RECORD +0 -515
  312. {agno-2.0.1.dist-info → agno-2.3.0.dist-info}/WHEEL +0 -0
  313. {agno-2.0.1.dist-info → agno-2.3.0.dist-info}/licenses/LICENSE +0 -0
  314. {agno-2.0.1.dist-info → agno-2.3.0.dist-info}/top_level.txt +0 -0
agno/db/singlestore/singlestore.py

@@ -5,6 +5,8 @@ from typing import Any, Dict, List, Optional, Tuple, Union
  from uuid import uuid4
 
  from agno.db.base import BaseDb, SessionType
+ from agno.db.migrations.manager import MigrationManager
+ from agno.db.schemas.culture import CulturalKnowledge
  from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
  from agno.db.schemas.knowledge import KnowledgeRow
  from agno.db.schemas.memory import UserMemory
@@ -14,21 +16,24 @@ from agno.db.singlestore.utils import (
  bulk_upsert_metrics,
  calculate_date_metrics,
  create_schema,
+ deserialize_cultural_knowledge_from_db,
  fetch_all_sessions_data,
  get_dates_to_calculate_metrics_for,
  is_table_available,
  is_valid_table,
+ serialize_cultural_knowledge_for_db,
  )
  from agno.session import AgentSession, Session, TeamSession, WorkflowSession
  from agno.utils.log import log_debug, log_error, log_info, log_warning
+ from agno.utils.string import generate_id
 
  try:
- from sqlalchemy import Index, UniqueConstraint, and_, func, update
+ from sqlalchemy import Index, UniqueConstraint, and_, func, select, update
  from sqlalchemy.dialects import mysql
  from sqlalchemy.engine import Engine, create_engine
  from sqlalchemy.orm import scoped_session, sessionmaker
  from sqlalchemy.schema import Column, MetaData, Table
- from sqlalchemy.sql.expression import select, text
+ from sqlalchemy.sql.expression import text
  except ImportError:
  raise ImportError("`sqlalchemy` not installed. Please install it using `pip install sqlalchemy`")
 
@@ -41,10 +46,12 @@ class SingleStoreDb(BaseDb):
  db_schema: Optional[str] = None,
  db_url: Optional[str] = None,
  session_table: Optional[str] = None,
+ culture_table: Optional[str] = None,
  memory_table: Optional[str] = None,
  metrics_table: Optional[str] = None,
  eval_table: Optional[str] = None,
  knowledge_table: Optional[str] = None,
+ versions_table: Optional[str] = None,
  ):
  """
  Interface for interacting with a SingleStore database.
@@ -60,22 +67,31 @@ class SingleStoreDb(BaseDb):
  db_schema (Optional[str]): The database schema to use.
  db_url (Optional[str]): The database URL to connect to.
  session_table (Optional[str]): Name of the table to store Agent, Team and Workflow sessions.
+ culture_table (Optional[str]): Name of the table to store cultural knowledge.
  memory_table (Optional[str]): Name of the table to store memories.
  metrics_table (Optional[str]): Name of the table to store metrics.
  eval_table (Optional[str]): Name of the table to store evaluation runs data.
  knowledge_table (Optional[str]): Name of the table to store knowledge content.
-
+ versions_table (Optional[str]): Name of the table to store schema versions.
  Raises:
  ValueError: If neither db_url nor db_engine is provided.
  ValueError: If none of the tables are provided.
  """
+ if id is None:
+ base_seed = db_url or str(db_engine.url) if db_engine else "singlestore" # type: ignore
+ schema_suffix = db_schema if db_schema is not None else "ai"
+ seed = f"{base_seed}#{schema_suffix}"
+ id = generate_id(seed)
+
  super().__init__(
  id=id,
  session_table=session_table,
+ culture_table=culture_table,
  memory_table=memory_table,
  metrics_table=metrics_table,
  eval_table=eval_table,
  knowledge_table=knowledge_table,
+ versions_table=versions_table,
  )
 
  _engine: Optional[Engine] = db_engine
@@ -99,6 +115,17 @@ class SingleStoreDb(BaseDb):
  self.Session: scoped_session = scoped_session(sessionmaker(bind=self.db_engine))
 
  # -- DB methods --
+ def table_exists(self, table_name: str) -> bool:
+ """Check if a table with the given name exists in the SingleStore database.
+
+ Args:
+ table_name: Name of the table to check
+
+ Returns:
+ bool: True if the table exists in the database, False otherwise
+ """
+ with self.Session() as sess:
+ return is_table_available(session=sess, table_name=table_name, db_schema=self.db_schema)
 
  def _create_table_structure_only(self, table_name: str, table_type: str, db_schema: Optional[str]) -> Table:
  """
@@ -144,6 +171,25 @@ class SingleStoreDb(BaseDb):
  log_error(f"Could not create table structure for {table_ref}: {e}")
  raise
 
+ def _create_all_tables(self):
+ """Create all tables for the database."""
+ tables_to_create = [
+ (self.session_table_name, "sessions"),
+ (self.memory_table_name, "memories"),
+ (self.metrics_table_name, "metrics"),
+ (self.eval_table_name, "evals"),
+ (self.knowledge_table_name, "knowledge"),
+ (self.versions_table_name, "versions"),
+ ]
+
+ for table_name, table_type in tables_to_create:
+ if table_name != self.versions_table_name:
+ # Also store the schema version for the created table
+ latest_schema_version = MigrationManager(self).latest_schema_version
+ self.upsert_schema_version(table_name=table_name, version=latest_schema_version.public)
+
+ self._create_table(table_name=table_name, table_type=table_type, db_schema=self.db_schema)
+
  def _create_table(self, table_name: str, table_type: str, db_schema: Optional[str]) -> Table:
  """
  Create a table with the appropriate schema based on the table type.
@@ -156,11 +202,11 @@ class SingleStoreDb(BaseDb):
  Returns:
  Table: SQLAlchemy Table object
  """
+ table_ref = f"{db_schema}.{table_name}" if db_schema else table_name
  try:
  table_schema = get_table_schema_definition(table_type)
 
- table_ref = f"{db_schema}.{table_name}" if db_schema else table_name
- log_debug(f"Creating table {table_ref} with schema: {table_schema}")
+ log_debug(f"Creating table {table_ref}")
 
  columns: List[Column] = []
  indexes: List[str] = []
@@ -309,8 +355,62 @@ class SingleStoreDb(BaseDb):
  )
  return self.knowledge_table
 
+ if table_type == "culture":
+ self.culture_table = self._get_or_create_table(
+ table_name=self.culture_table_name,
+ table_type="culture",
+ db_schema=self.db_schema,
+ create_table_if_not_found=create_table_if_not_found,
+ )
+ return self.culture_table
+
+ if table_type == "versions":
+ self.versions_table = self._get_or_create_table(
+ table_name=self.versions_table_name,
+ table_type="versions",
+ db_schema=self.db_schema,
+ create_table_if_not_found=create_table_if_not_found,
+ )
+ return self.versions_table
+
  raise ValueError(f"Unknown table type: {table_type}")
 
+ def get_latest_schema_version(self, table_name: str) -> str:
+ """Get the latest version of the database schema."""
+ table = self._get_table(table_type="versions", create_table_if_not_found=True)
+ if table is None:
+ return "2.0.0"
+ with self.Session() as sess:
+ stmt = select(table)
+ # Latest version for the given table
+ stmt = stmt.where(table.c.table_name == table_name)
+ stmt = stmt.order_by(table.c.version.desc()).limit(1)
+ result = sess.execute(stmt).fetchone()
+ if result is None:
+ return "2.0.0"
+ version_dict = dict(result._mapping)
+ return version_dict.get("version") or "2.0.0"
+
+ def upsert_schema_version(self, table_name: str, version: str) -> None:
+ """Upsert the schema version into the database."""
+ table = self._get_table(table_type="versions", create_table_if_not_found=True)
+ if table is None:
+ return
+ current_datetime = datetime.now().isoformat()
+ with self.Session() as sess, sess.begin():
+ stmt = mysql.insert(table).values(
+ table_name=table_name,
+ version=version,
+ created_at=current_datetime, # Store as ISO format string
+ updated_at=current_datetime,
+ )
+ # Update version if table_name already exists
+ stmt = stmt.on_duplicate_key_update(
+ version=version,
+ updated_at=current_datetime,
+ )
+ sess.execute(stmt)
+
  def _get_or_create_table(
  self,
  table_name: str,
@@ -336,6 +436,12 @@ class SingleStoreDb(BaseDb):
  if not table_is_available:
  if not create_table_if_not_found:
  return None
+
+ # Also store the schema version for the created table
+ if table_name != self.versions_table_name:
+ latest_schema_version = MigrationManager(self).latest_schema_version
+ self.upsert_schema_version(table_name=table_name, version=latest_schema_version.public)
+
  return self._create_table(table_name=table_name, table_type=table_type, db_schema=db_schema)
 
  if not is_valid_table(
@@ -386,7 +492,7 @@ class SingleStoreDb(BaseDb):
 
  except Exception as e:
  log_error(f"Error deleting session: {e}")
- return False
+ raise e
 
  def delete_sessions(self, session_ids: List[str]) -> None:
  """Delete all given sessions from the database.
@@ -411,6 +517,7 @@ class SingleStoreDb(BaseDb):
 
  except Exception as e:
  log_error(f"Error deleting sessions: {e}")
+ raise e
 
  def get_session(
  self,
@@ -424,8 +531,8 @@ class SingleStoreDb(BaseDb):
 
  Args:
  session_id (str): ID of the session to read.
+ session_type (SessionType): Type of session to get.
  user_id (Optional[str]): User ID to filter by. Defaults to None.
- session_type (Optional[SessionType]): Type of session to read. Defaults to None.
  deserialize (Optional[bool]): Whether to serialize the session. Defaults to True.
 
  Returns:
@@ -446,9 +553,6 @@ class SingleStoreDb(BaseDb):
 
  if user_id is not None:
  stmt = stmt.where(table.c.user_id == user_id)
- if session_type is not None:
- session_type_value = session_type.value if isinstance(session_type, SessionType) else session_type
- stmt = stmt.where(table.c.session_type == session_type_value)
  result = sess.execute(stmt).fetchone()
  if result is None:
  return None
@@ -469,7 +573,7 @@ class SingleStoreDb(BaseDb):
 
  except Exception as e:
  log_error(f"Exception reading from session table: {e}")
- return None
+ raise e
 
  def get_sessions(
  self,
@@ -489,7 +593,7 @@ class SingleStoreDb(BaseDb):
  Get all sessions in the given table. Can filter by user_id and entity_id.
 
  Args:
- session_type (Optional[SessionType]): The type of session to filter by. Defaults to None.
+ session_type (Optional[SessionType]): The type of session to filter by.
  user_id (Optional[str]): The ID of the user to filter by.
  component_id (Optional[str]): The ID of the agent / workflow to filter by.
  session_name (Optional[str]): The name of the session to filter by.
@@ -573,8 +677,8 @@ class SingleStoreDb(BaseDb):
  raise ValueError(f"Invalid session type: {session_type}")
 
  except Exception as e:
- log_debug(f"Exception reading from session table: {e}")
- return []
+ log_error(f"Exception reading from session table: {e}")
+ raise e
 
  def rename_session(
  self, session_id: str, session_type: SessionType, session_name: str, deserialize: Optional[bool] = True
@@ -636,7 +740,7 @@ class SingleStoreDb(BaseDb):
 
  except Exception as e:
  log_error(f"Error renaming session: {e}")
- return None
+ raise e
 
  def upsert_session(self, session: Session, deserialize: Optional[bool] = True) -> Optional[Session]:
  """
@@ -786,14 +890,214 @@ class SingleStoreDb(BaseDb):
 
  except Exception as e:
  log_error(f"Error upserting into sessions table: {e}")
- return None
+ raise e
+
+ def upsert_sessions(
+ self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
+ ) -> List[Union[Session, Dict[str, Any]]]:
+ """
+ Bulk upsert multiple sessions for improved performance on large datasets.
+
+ Args:
+ sessions (List[Session]): List of sessions to upsert.
+ deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
+
+ Returns:
+ List[Union[Session, Dict[str, Any]]]: List of upserted sessions.
+
+ Raises:
+ Exception: If an error occurs during bulk upsert.
+ """
+ if not sessions:
+ return []
+
+ try:
+ table = self._get_table(table_type="sessions", create_table_if_not_found=True)
+ if table is None:
+ return []
+
+ # Group sessions by type for batch processing
+ agent_sessions = []
+ team_sessions = []
+ workflow_sessions = []
+
+ for session in sessions:
+ if isinstance(session, AgentSession):
+ agent_sessions.append(session)
+ elif isinstance(session, TeamSession):
+ team_sessions.append(session)
+ elif isinstance(session, WorkflowSession):
+ workflow_sessions.append(session)
+
+ results: List[Union[Session, Dict[str, Any]]] = []
+
+ with self.Session() as sess, sess.begin():
+ # Bulk upsert agent sessions
+ if agent_sessions:
+ agent_data = []
+ for session in agent_sessions:
+ session_dict = session.to_dict()
+ # Use preserved updated_at if flag is set, otherwise use current time
+ updated_at = session_dict.get("updated_at") if preserve_updated_at else int(time.time())
+ agent_data.append(
+ {
+ "session_id": session_dict.get("session_id"),
+ "session_type": SessionType.AGENT.value,
+ "agent_id": session_dict.get("agent_id"),
+ "user_id": session_dict.get("user_id"),
+ "runs": session_dict.get("runs"),
+ "agent_data": session_dict.get("agent_data"),
+ "session_data": session_dict.get("session_data"),
+ "summary": session_dict.get("summary"),
+ "metadata": session_dict.get("metadata"),
+ "created_at": session_dict.get("created_at"),
+ "updated_at": updated_at,
+ }
+ )
+
+ if agent_data:
+ stmt = mysql.insert(table)
+ stmt = stmt.on_duplicate_key_update(
+ agent_id=stmt.inserted.agent_id,
+ user_id=stmt.inserted.user_id,
+ agent_data=stmt.inserted.agent_data,
+ session_data=stmt.inserted.session_data,
+ summary=stmt.inserted.summary,
+ metadata=stmt.inserted.metadata,
+ runs=stmt.inserted.runs,
+ updated_at=stmt.inserted.updated_at,
+ )
+ sess.execute(stmt, agent_data)
+
+ # Fetch the results for agent sessions
+ agent_ids = [session.session_id for session in agent_sessions]
+ select_stmt = select(table).where(table.c.session_id.in_(agent_ids))
+ result = sess.execute(select_stmt).fetchall()
+
+ for row in result:
+ if deserialize:
+ deserialized_session = AgentSession.from_dict(session_dict)
+ if deserialized_session is None:
+ continue
+ results.append(deserialized_session)
+ else:
+ results.append(dict(row._mapping))
+
+ # Bulk upsert team sessions
+ if team_sessions:
+ team_data = []
+ for session in team_sessions:
+ session_dict = session.to_dict()
+ # Use preserved updated_at if flag is set, otherwise use current time
+ updated_at = session_dict.get("updated_at") if preserve_updated_at else int(time.time())
+ team_data.append(
+ {
+ "session_id": session_dict.get("session_id"),
+ "session_type": SessionType.TEAM.value,
+ "team_id": session_dict.get("team_id"),
+ "user_id": session_dict.get("user_id"),
+ "runs": session_dict.get("runs"),
+ "team_data": session_dict.get("team_data"),
+ "session_data": session_dict.get("session_data"),
+ "summary": session_dict.get("summary"),
+ "metadata": session_dict.get("metadata"),
+ "created_at": session_dict.get("created_at"),
+ "updated_at": updated_at,
+ }
+ )
+
+ if team_data:
+ stmt = mysql.insert(table)
+ stmt = stmt.on_duplicate_key_update(
+ team_id=stmt.inserted.team_id,
+ user_id=stmt.inserted.user_id,
+ team_data=stmt.inserted.team_data,
+ session_data=stmt.inserted.session_data,
+ summary=stmt.inserted.summary,
+ metadata=stmt.inserted.metadata,
+ runs=stmt.inserted.runs,
+ updated_at=stmt.inserted.updated_at,
+ )
+ sess.execute(stmt, team_data)
+
+ # Fetch the results for team sessions
+ team_ids = [session.session_id for session in team_sessions]
+ select_stmt = select(table).where(table.c.session_id.in_(team_ids))
+ result = sess.execute(select_stmt).fetchall()
+
+ for row in result:
+ if deserialize:
+ deserialized_team_session = TeamSession.from_dict(session_dict)
+ if deserialized_team_session is None:
+ continue
+ results.append(deserialized_team_session)
+ else:
+ results.append(dict(row._mapping))
+
+ # Bulk upsert workflow sessions
+ if workflow_sessions:
+ workflow_data = []
+ for session in workflow_sessions:
+ session_dict = session.to_dict()
+ # Use preserved updated_at if flag is set, otherwise use current time
+ updated_at = session_dict.get("updated_at") if preserve_updated_at else int(time.time())
+ workflow_data.append(
+ {
+ "session_id": session_dict.get("session_id"),
+ "session_type": SessionType.WORKFLOW.value,
+ "workflow_id": session_dict.get("workflow_id"),
+ "user_id": session_dict.get("user_id"),
+ "runs": session_dict.get("runs"),
+ "workflow_data": session_dict.get("workflow_data"),
+ "session_data": session_dict.get("session_data"),
+ "summary": session_dict.get("summary"),
+ "metadata": session_dict.get("metadata"),
+ "created_at": session_dict.get("created_at"),
+ "updated_at": updated_at,
+ }
+ )
+
+ if workflow_data:
+ stmt = mysql.insert(table)
+ stmt = stmt.on_duplicate_key_update(
+ workflow_id=stmt.inserted.workflow_id,
+ user_id=stmt.inserted.user_id,
+ workflow_data=stmt.inserted.workflow_data,
+ session_data=stmt.inserted.session_data,
+ summary=stmt.inserted.summary,
+ metadata=stmt.inserted.metadata,
+ runs=stmt.inserted.runs,
+ updated_at=stmt.inserted.updated_at,
+ )
+ sess.execute(stmt, workflow_data)
+
+ # Fetch the results for workflow sessions
+ workflow_ids = [session.session_id for session in workflow_sessions]
+ select_stmt = select(table).where(table.c.session_id.in_(workflow_ids))
+ result = sess.execute(select_stmt).fetchall()
+
+ for row in result:
+ if deserialize:
+ deserialized_workflow_session = WorkflowSession.from_dict(session_dict)
+ if deserialized_workflow_session is None:
+ continue
+ results.append(deserialized_workflow_session)
+ else:
+ results.append(dict(row._mapping))
+
+ return results
+
+ except Exception as e:
+ log_error(f"Exception during bulk session upsert: {e}")
+ return []
 
  # -- Memory methods --
- def delete_user_memory(self, memory_id: str):
+ def delete_user_memory(self, memory_id: str, user_id: Optional[str] = None):
  """Delete a user memory from the database.
 
  Args:
  memory_id (str): The ID of the memory to delete.
+ user_id (Optional[str]): The ID of the user to filter by. Defaults to None.
 
  Returns:
  bool: True if deletion was successful, False otherwise.
@@ -808,6 +1112,8 @@ class SingleStoreDb(BaseDb):
 
  with self.Session() as sess, sess.begin():
  delete_stmt = table.delete().where(table.c.memory_id == memory_id)
+ if user_id is not None:
+ delete_stmt = delete_stmt.where(table.c.user_id == user_id)
  result = sess.execute(delete_stmt)
 
  success = result.rowcount > 0
@@ -818,12 +1124,14 @@ class SingleStoreDb(BaseDb):
 
  except Exception as e:
  log_error(f"Error deleting memory: {e}")
+ raise e
 
- def delete_user_memories(self, memory_ids: List[str]) -> None:
+ def delete_user_memories(self, memory_ids: List[str], user_id: Optional[str] = None) -> None:
  """Delete user memories from the database.
 
  Args:
  memory_ids (List[str]): The IDs of the memories to delete.
+ user_id (Optional[str]): The ID of the user to filter by. Defaults to None.
 
  Raises:
  Exception: If an error occurs during deletion.
@@ -835,12 +1143,15 @@ class SingleStoreDb(BaseDb):
 
  with self.Session() as sess, sess.begin():
  delete_stmt = table.delete().where(table.c.memory_id.in_(memory_ids))
+ if user_id is not None:
+ delete_stmt = delete_stmt.where(table.c.user_id == user_id)
  result = sess.execute(delete_stmt)
  if result.rowcount == 0:
  log_debug(f"No memories found with ids: {memory_ids}")
 
  except Exception as e:
  log_error(f"Error deleting memories: {e}")
+ raise e
 
  def get_all_memory_topics(self) -> List[str]:
  """Get all memory topics from the database.
@@ -868,14 +1179,17 @@ class SingleStoreDb(BaseDb):
 
  except Exception as e:
  log_error(f"Exception reading from memory table: {e}")
- return []
+ raise e
 
- def get_user_memory(self, memory_id: str, deserialize: Optional[bool] = True) -> Optional[UserMemory]:
+ def get_user_memory(
+ self, memory_id: str, deserialize: Optional[bool] = True, user_id: Optional[str] = None
+ ) -> Optional[UserMemory]:
  """Get a memory from the database.
 
  Args:
  memory_id (str): The ID of the memory to get.
  deserialize (Optional[bool]): Whether to serialize the memory. Defaults to True.
+ user_id (Optional[str]): The ID of the user to filter by. Defaults to None.
 
  Returns:
  Union[UserMemory, Dict[str, Any], None]:
@@ -892,6 +1206,8 @@ class SingleStoreDb(BaseDb):
 
  with self.Session() as sess, sess.begin():
  stmt = select(table).where(table.c.memory_id == memory_id)
+ if user_id is not None:
+ stmt = stmt.where(table.c.user_id == user_id)
 
  result = sess.execute(stmt).fetchone()
  if not result:
@@ -904,7 +1220,7 @@ class SingleStoreDb(BaseDb):
 
  except Exception as e:
  log_error(f"Exception reading from memory table: {e}")
- return None
+ raise e
 
  def get_user_memories(
  self,
@@ -988,7 +1304,7 @@ class SingleStoreDb(BaseDb):
 
  except Exception as e:
  log_error(f"Exception reading from memory table: {e}")
- return []
+ raise e
 
  def get_user_memory_stats(
  self, limit: Optional[int] = None, page: Optional[int] = None
@@ -1055,7 +1371,7 @@ class SingleStoreDb(BaseDb):
 
  except Exception as e:
  log_error(f"Exception getting user memory stats: {e}")
- return [], 0
+ raise e
 
  def upsert_user_memory(
  self, memory: UserMemory, deserialize: Optional[bool] = True
@@ -1083,6 +1399,8 @@ class SingleStoreDb(BaseDb):
  if memory.memory_id is None:
  memory.memory_id = str(uuid4())
 
+ current_time = int(time.time())
+
  stmt = mysql.insert(table).values(
  memory_id=memory.memory_id,
  memory=memory.memory,
@@ -1091,7 +1409,9 @@ class SingleStoreDb(BaseDb):
  agent_id=memory.agent_id,
  team_id=memory.team_id,
  topics=memory.topics,
- updated_at=int(time.time()),
+ feedback=memory.feedback,
+ created_at=memory.created_at,
+ updated_at=current_time,
  )
  stmt = stmt.on_duplicate_key_update(
  memory=stmt.inserted.memory,
@@ -1100,7 +1420,10 @@ class SingleStoreDb(BaseDb):
  user_id=stmt.inserted.user_id,
  agent_id=stmt.inserted.agent_id,
  team_id=stmt.inserted.team_id,
- updated_at=int(time.time()),
+ feedback=stmt.inserted.feedback,
+ updated_at=stmt.inserted.updated_at,
+ # Preserve created_at on update - don't overwrite existing value
+ created_at=table.c.created_at,
  )
 
  sess.execute(stmt)
@@ -1119,7 +1442,93 @@ class SingleStoreDb(BaseDb):
 
  except Exception as e:
  log_error(f"Error upserting user memory: {e}")
- return None
+ raise e
+
+ def upsert_memories(
+ self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
+ ) -> List[Union[UserMemory, Dict[str, Any]]]:
+ """
+ Bulk upsert multiple user memories for improved performance on large datasets.
+
+ Args:
+ memories (List[UserMemory]): List of memories to upsert.
+ deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.
+
+ Returns:
+ List[Union[UserMemory, Dict[str, Any]]]: List of upserted memories.
+
+ Raises:
+ Exception: If an error occurs during bulk upsert.
+ """
+ if not memories:
+ return []
+
+ try:
+ table = self._get_table(table_type="memories", create_table_if_not_found=True)
+ if table is None:
+ return []
+
+ # Prepare data for bulk insert
+ memory_data = []
+ current_time = int(time.time())
+
+ for memory in memories:
+ if memory.memory_id is None:
+ memory.memory_id = str(uuid4())
+ # Use preserved updated_at if flag is set, otherwise use current time
+ updated_at = memory.updated_at if preserve_updated_at else current_time
+
+ memory_data.append(
+ {
+ "memory_id": memory.memory_id,
+ "memory": memory.memory,
+ "input": memory.input,
+ "user_id": memory.user_id,
+ "agent_id": memory.agent_id,
+ "team_id": memory.team_id,
+ "topics": memory.topics,
+ "feedback": memory.feedback,
+ "created_at": memory.created_at,
+ "updated_at": updated_at,
+ }
+ )
+
+ results: List[Union[UserMemory, Dict[str, Any]]] = []
+
+ with self.Session() as sess, sess.begin():
+ if memory_data:
+ stmt = mysql.insert(table)
+ stmt = stmt.on_duplicate_key_update(
+ memory=stmt.inserted.memory,
+ topics=stmt.inserted.topics,
+ input=stmt.inserted.input,
+ user_id=stmt.inserted.user_id,
+ agent_id=stmt.inserted.agent_id,
+ team_id=stmt.inserted.team_id,
+ feedback=stmt.inserted.feedback,
+ updated_at=stmt.inserted.updated_at,
+ # Preserve created_at on update
+ created_at=table.c.created_at,
+ )
+ sess.execute(stmt, memory_data)
+
+ # Fetch the results
+ memory_ids = [memory.memory_id for memory in memories if memory.memory_id]
+ select_stmt = select(table).where(table.c.memory_id.in_(memory_ids))
+ result = sess.execute(select_stmt).fetchall()
+
+ for row in result:
+ memory_raw = dict(row._mapping)
+ if deserialize:
+ results.append(UserMemory.from_dict(memory_raw))
+ else:
+ results.append(memory_raw)
+
+ return results
+
+ except Exception as e:
+ log_error(f"Exception during bulk memory upsert: {e}")
+ return []
 
  def clear_memories(self) -> None:
  """Delete all memories from the database.
@@ -1136,7 +1545,8 @@ class SingleStoreDb(BaseDb):
  sess.execute(table.delete())
 
  except Exception as e:
- log_warning(f"Exception deleting all memories: {e}")
+ log_error(f"Exception deleting all memories: {e}")
+ raise e
 
  # -- Metrics methods --
  def _get_all_sessions_for_metrics_calculation(
@@ -1278,7 +1688,7 @@ class SingleStoreDb(BaseDb):
  return metrics_records
 
  except Exception as e:
- log_error(f"Error refreshing metrics: {e}")
+ log_error(f"Error calculating metrics: {e}")
  raise e
 
  def get_metrics(
@@ -1321,7 +1731,7 @@ class SingleStoreDb(BaseDb):
 
  except Exception as e:
  log_error(f"Error getting metrics: {e}")
- return [], None
+ raise e
 
  # -- Knowledge methods --
 
@@ -1331,15 +1741,19 @@ class SingleStoreDb(BaseDb):
  Args:
  id (str): The ID of the knowledge row to delete.
  """
- table = self._get_table(table_type="knowledge")
- if table is None:
- return
+ try:
+ table = self._get_table(table_type="knowledge")
+ if table is None:
+ return
 
- with self.Session() as sess, sess.begin():
- stmt = table.delete().where(table.c.id == id)
- sess.execute(stmt)
+ with self.Session() as sess, sess.begin():
+ stmt = table.delete().where(table.c.id == id)
+ sess.execute(stmt)
 
- log_debug(f"Deleted knowledge content with id '{id}'")
+ log_debug(f"Deleted knowledge content with id '{id}'")
+ except Exception as e:
+ log_error(f"Error deleting knowledge content: {e}")
+ raise e
 
  def get_knowledge_content(self, id: str) -> Optional[KnowledgeRow]:
  """Get a knowledge row from the database.
@@ -1350,16 +1764,20 @@ class SingleStoreDb(BaseDb):
  Returns:
  Optional[KnowledgeRow]: The knowledge row, or None if it doesn't exist.
  """
- table = self._get_table(table_type="knowledge")
- if table is None:
- return None
-
- with self.Session() as sess, sess.begin():
- stmt = select(table).where(table.c.id == id)
- result = sess.execute(stmt).fetchone()
- if result is None:
+ try:
+ table = self._get_table(table_type="knowledge")
+ if table is None:
  return None
- return KnowledgeRow.model_validate(result._mapping)
+
+ with self.Session() as sess, sess.begin():
+ stmt = select(table).where(table.c.id == id)
+ result = sess.execute(stmt).fetchone()
+ if result is None:
+ return None
+ return KnowledgeRow.model_validate(result._mapping)
+ except Exception as e:
+ log_error(f"Error getting knowledge content: {e}")
+ raise e
 
  def get_knowledge_contents(
  self,
@@ -1412,7 +1830,7 @@ class SingleStoreDb(BaseDb):
 
  except Exception as e:
  log_error(f"Error getting knowledge contents: {e}")
- return [], 0
+ raise e
 
  def upsert_knowledge_content(self, knowledge_row: KnowledgeRow):
  """Upsert knowledge content in the database.
@@ -1457,7 +1875,7 @@ class SingleStoreDb(BaseDb):
 
  except Exception as e:
  log_error(f"Error upserting knowledge row: {e}")
- return None
+ raise e
 
  # -- Eval methods --
 
@@ -1491,7 +1909,7 @@ class SingleStoreDb(BaseDb):
 
  except Exception as e:
  log_error(f"Error creating eval run: {e}")
- return None
+ raise e
 
  def delete_eval_run(self, eval_run_id: str) -> None:
  """Delete an eval run from the database.
@@ -1514,7 +1932,7 @@ class SingleStoreDb(BaseDb):
 
  except Exception as e:
  log_error(f"Error deleting eval run {eval_run_id}: {e}")
- raise
+ raise e
 
  def delete_eval_runs(self, eval_run_ids: List[str]) -> None:
  """Delete multiple eval runs from the database.
@@ -1537,7 +1955,7 @@ class SingleStoreDb(BaseDb):
 
  except Exception as e:
  log_error(f"Error deleting eval runs {eval_run_ids}: {e}")
- raise
+ raise e
 
  def get_eval_run(
  self, eval_run_id: str, deserialize: Optional[bool] = True
@@ -1575,7 +1993,7 @@ class SingleStoreDb(BaseDb):
 
  except Exception as e:
  log_error(f"Exception getting eval run {eval_run_id}: {e}")
- return None
+ raise e
 
  def get_eval_runs(
  self,
@@ -1670,7 +2088,7 @@ class SingleStoreDb(BaseDb):
 
  except Exception as e:
  log_error(f"Exception getting eval runs: {e}")
- return [] if deserialize else ([], 0)
+ raise e
 
  def rename_eval_run(
  self, eval_run_id: str, name: str, deserialize: Optional[bool] = True
@@ -1709,4 +2127,220 @@ class SingleStoreDb(BaseDb):
 
  except Exception as e:
  log_error(f"Error renaming eval run {eval_run_id}: {e}")
- raise
+ raise e
+
+ # -- Culture methods --
+
+ def clear_cultural_knowledge(self) -> None:
+ """Delete all cultural knowledge from the database.
+
+ Raises:
+ Exception: If an error occurs during deletion.
+ """
+ try:
+ table = self._get_table(table_type="culture")
+ if table is None:
+ return
+
+ with self.Session() as sess, sess.begin():
+ sess.execute(table.delete())
+
+ except Exception as e:
+ log_warning(f"Exception deleting all cultural knowledge: {e}")
+ raise e
+
+ def delete_cultural_knowledge(self, id: str) -> None:
+ """Delete a cultural knowledge entry from the database.
+
+ Args:
+ id (str): The ID of the cultural knowledge to delete.
+
+ Raises:
+ Exception: If an error occurs during deletion.
+ """
+ try:
+ table = self._get_table(table_type="culture")
+ if table is None:
+ return
+
+ with self.Session() as sess, sess.begin():
+ delete_stmt = table.delete().where(table.c.id == id)
+ result = sess.execute(delete_stmt)
+
+ success = result.rowcount > 0
+ if success:
+ log_debug(f"Successfully deleted cultural knowledge id: {id}")
+ else:
+ log_debug(f"No cultural knowledge found with id: {id}")
+
+ except Exception as e:
+ log_error(f"Error deleting cultural knowledge: {e}")
+ raise e
+
+ def get_cultural_knowledge(
+ self, id: str, deserialize: Optional[bool] = True
+ ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
+ """Get a cultural knowledge entry from the database.
+
+ Args:
+ id (str): The ID of the cultural knowledge to get.
+ deserialize (Optional[bool]): Whether to deserialize the cultural knowledge. Defaults to True.
+
+ Returns:
+ Optional[Union[CulturalKnowledge, Dict[str, Any]]]: The cultural knowledge entry, or None if it doesn't exist.
+
+ Raises:
+ Exception: If an error occurs during retrieval.
+ """
+ try:
+ table = self._get_table(table_type="culture")
+ if table is None:
+ return None
+
+ with self.Session() as sess, sess.begin():
+ stmt = select(table).where(table.c.id == id)
+ result = sess.execute(stmt).fetchone()
+ if result is None:
+ return None
+
+ db_row = dict(result._mapping)
+ if not db_row or not deserialize:
+ return db_row
+
+ return deserialize_cultural_knowledge_from_db(db_row)
+
+ except Exception as e:
+ log_error(f"Exception reading from cultural knowledge table: {e}")
+ raise e
+
+ def get_all_cultural_knowledge(
+ self,
+ name: Optional[str] = None,
+ agent_id: Optional[str] = None,
+ team_id: Optional[str] = None,
+ limit: Optional[int] = None,
+ page: Optional[int] = None,
+ sort_by: Optional[str] = None,
+ sort_order: Optional[str] = None,
+ deserialize: Optional[bool] = True,
+ ) -> Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
+ """Get all cultural knowledge from the database as CulturalKnowledge objects.
+
+ Args:
+ name (Optional[str]): The name of the cultural knowledge to filter by.
+ agent_id (Optional[str]): The ID of the agent to filter by.
+ team_id (Optional[str]): The ID of the team to filter by.
+ limit (Optional[int]): The maximum number of cultural knowledge entries to return.
+ page (Optional[int]): The page number.
+ sort_by (Optional[str]): The column to sort by.
+ sort_order (Optional[str]): The order to sort by.
+ deserialize (Optional[bool]): Whether to deserialize the cultural knowledge. Defaults to True.
+
+ Returns:
+ Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
+ - When deserialize=True: List of CulturalKnowledge objects
+ - When deserialize=False: List of CulturalKnowledge dictionaries and total count
+
+ Raises:
+ Exception: If an error occurs during retrieval.
+ """
+ try:
+ table = self._get_table(table_type="culture")
+ if table is None:
+ return [] if deserialize else ([], 0)
+
+ with self.Session() as sess, sess.begin():
+ stmt = select(table)
+
+ # Filtering
+ if name is not None:
+ stmt = stmt.where(table.c.name == name)
+ if agent_id is not None:
+ stmt = stmt.where(table.c.agent_id == agent_id)
+ if team_id is not None:
+ stmt = stmt.where(table.c.team_id == team_id)
+
+ # Get total count after applying filtering
+ count_stmt = select(func.count()).select_from(stmt.alias())
+ total_count = sess.execute(count_stmt).scalar()
+
+ # Sorting
+ stmt = apply_sorting(stmt, table, sort_by, sort_order)
+ # Paginating
+ if limit is not None:
+ stmt = stmt.limit(limit)
+ if page is not None:
+ stmt = stmt.offset((page - 1) * limit)
+
+ result = sess.execute(stmt).fetchall()
+ if not result:
+ return [] if deserialize else ([], 0)
+
+ db_rows = [dict(record._mapping) for record in result]
+
+ if not deserialize:
+ return db_rows, total_count
+
+ return [deserialize_cultural_knowledge_from_db(row) for row in db_rows]
+
+ except Exception as e:
+ log_error(f"Error reading from cultural knowledge table: {e}")
+ raise e
+
+ def upsert_cultural_knowledge(
+ self, cultural_knowledge: CulturalKnowledge, deserialize: Optional[bool] = True
+ ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
+ """Upsert a cultural knowledge entry into the database.
+
+ Args:
+ cultural_knowledge (CulturalKnowledge): The cultural knowledge to upsert.
+ deserialize (Optional[bool]): Whether to deserialize the cultural knowledge. Defaults to True.
+
+ Returns:
+ Optional[CulturalKnowledge]: The upserted cultural knowledge entry.
+
+ Raises:
+ Exception: If an error occurs during upsert.
+ """
+ try:
+ table = self._get_table(table_type="culture", create_table_if_not_found=True)
+ if table is None:
+ return None
+
+ if cultural_knowledge.id is None:
+ cultural_knowledge.id = str(uuid4())
+
+ # Serialize content, categories, and notes into a JSON dict for DB storage
+ content_dict = serialize_cultural_knowledge_for_db(cultural_knowledge)
+
+ with self.Session() as sess, sess.begin():
+ stmt = mysql.insert(table).values(
+ id=cultural_knowledge.id,
+ name=cultural_knowledge.name,
+ summary=cultural_knowledge.summary,
+ content=content_dict if content_dict else None,
+ metadata=cultural_knowledge.metadata,
+ input=cultural_knowledge.input,
+ created_at=cultural_knowledge.created_at,
+ updated_at=int(time.time()),
+ agent_id=cultural_knowledge.agent_id,
+ team_id=cultural_knowledge.team_id,
+ )
+ stmt = stmt.on_duplicate_key_update(
+ name=cultural_knowledge.name,
+ summary=cultural_knowledge.summary,
+ content=content_dict if content_dict else None,
+ metadata=cultural_knowledge.metadata,
+ input=cultural_knowledge.input,
+ updated_at=int(time.time()),
+ agent_id=cultural_knowledge.agent_id,
+ team_id=cultural_knowledge.team_id,
+ )
+ sess.execute(stmt)
+
+ # Fetch the inserted/updated row
+ return self.get_cultural_knowledge(id=cultural_knowledge.id, deserialize=deserialize)
+
+ except Exception as e:
+ log_error(f"Error upserting cultural knowledge: {e}")
+ raise e
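
Taken together, the SingleStoreDb hunks above add a cultural-knowledge table, a schema-versions table, per-user filtering on memory reads and deletes, and bulk upsert_memories / upsert_sessions helpers. A minimal usage sketch follows. It only calls constructors and methods whose signatures appear in this diff, but the db_url format and the UserMemory / CulturalKnowledge keyword arguments are assumptions (their definitions are not part of this diff), so treat it as illustrative rather than authoritative.

# Illustrative sketch of the agno 2.3.0 SingleStoreDb surface shown above.
# Assumptions: a reachable SingleStore instance, a MySQL-style db_url, and that
# UserMemory / CulturalKnowledge accept the keyword arguments used here.
from agno.db.schemas.culture import CulturalKnowledge
from agno.db.schemas.memory import UserMemory
from agno.db.singlestore.singlestore import SingleStoreDb

db = SingleStoreDb(
    db_url="mysql+pymysql://user:password@localhost:3306/agno",  # assumed URL format
    session_table="agno_sessions",
    memory_table="agno_memories",
    knowledge_table="agno_knowledge",
    culture_table="agno_culture",    # new in 2.3.0
    versions_table="agno_versions",  # new in 2.3.0
)

# Bulk memory upsert: one INSERT ... ON DUPLICATE KEY UPDATE per batch,
# preserving created_at on updates (see upsert_memories above).
memories = [
    UserMemory(memory="Prefers concise answers", user_id="user-1"),  # assumed kwargs
    UserMemory(memory="Works in UTC+2", user_id="user-1"),
]
db.upsert_memories(memories)

# Memory reads and deletes can now be scoped to a user via the new user_id parameter.
db.get_user_memory(memory_id=memories[0].memory_id, user_id="user-1")
db.delete_user_memory(memory_id=memories[1].memory_id, user_id="user-1")

# Cultural knowledge is stored in the new culture table.
ck = CulturalKnowledge(name="tone", summary="Keep replies friendly and brief")  # assumed kwargs
db.upsert_cultural_knowledge(ck)
print(db.get_all_cultural_knowledge(name="tone"))

# Schema versions are tracked per table in the new versions table.
print(db.table_exists("agno_memories"))
print(db.get_latest_schema_version(table_name="agno_memories"))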