agno 2.1.2__py3-none-any.whl → 2.3.13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (314)
  1. agno/agent/agent.py +5540 -2273
  2. agno/api/api.py +2 -0
  3. agno/api/os.py +1 -1
  4. agno/compression/__init__.py +3 -0
  5. agno/compression/manager.py +247 -0
  6. agno/culture/__init__.py +3 -0
  7. agno/culture/manager.py +956 -0
  8. agno/db/async_postgres/__init__.py +3 -0
  9. agno/db/base.py +689 -6
  10. agno/db/dynamo/dynamo.py +933 -37
  11. agno/db/dynamo/schemas.py +174 -10
  12. agno/db/dynamo/utils.py +63 -4
  13. agno/db/firestore/firestore.py +831 -9
  14. agno/db/firestore/schemas.py +51 -0
  15. agno/db/firestore/utils.py +102 -4
  16. agno/db/gcs_json/gcs_json_db.py +660 -12
  17. agno/db/gcs_json/utils.py +60 -26
  18. agno/db/in_memory/in_memory_db.py +287 -14
  19. agno/db/in_memory/utils.py +60 -2
  20. agno/db/json/json_db.py +590 -14
  21. agno/db/json/utils.py +60 -26
  22. agno/db/migrations/manager.py +199 -0
  23. agno/db/migrations/v1_to_v2.py +43 -13
  24. agno/db/migrations/versions/__init__.py +0 -0
  25. agno/db/migrations/versions/v2_3_0.py +938 -0
  26. agno/db/mongo/__init__.py +15 -1
  27. agno/db/mongo/async_mongo.py +2760 -0
  28. agno/db/mongo/mongo.py +879 -11
  29. agno/db/mongo/schemas.py +42 -0
  30. agno/db/mongo/utils.py +80 -8
  31. agno/db/mysql/__init__.py +2 -1
  32. agno/db/mysql/async_mysql.py +2912 -0
  33. agno/db/mysql/mysql.py +946 -68
  34. agno/db/mysql/schemas.py +72 -10
  35. agno/db/mysql/utils.py +198 -7
  36. agno/db/postgres/__init__.py +2 -1
  37. agno/db/postgres/async_postgres.py +2579 -0
  38. agno/db/postgres/postgres.py +942 -57
  39. agno/db/postgres/schemas.py +81 -18
  40. agno/db/postgres/utils.py +164 -2
  41. agno/db/redis/redis.py +671 -7
  42. agno/db/redis/schemas.py +50 -0
  43. agno/db/redis/utils.py +65 -7
  44. agno/db/schemas/__init__.py +2 -1
  45. agno/db/schemas/culture.py +120 -0
  46. agno/db/schemas/evals.py +1 -0
  47. agno/db/schemas/memory.py +17 -2
  48. agno/db/singlestore/schemas.py +63 -0
  49. agno/db/singlestore/singlestore.py +949 -83
  50. agno/db/singlestore/utils.py +60 -2
  51. agno/db/sqlite/__init__.py +2 -1
  52. agno/db/sqlite/async_sqlite.py +2911 -0
  53. agno/db/sqlite/schemas.py +62 -0
  54. agno/db/sqlite/sqlite.py +965 -46
  55. agno/db/sqlite/utils.py +169 -8
  56. agno/db/surrealdb/__init__.py +3 -0
  57. agno/db/surrealdb/metrics.py +292 -0
  58. agno/db/surrealdb/models.py +334 -0
  59. agno/db/surrealdb/queries.py +71 -0
  60. agno/db/surrealdb/surrealdb.py +1908 -0
  61. agno/db/surrealdb/utils.py +147 -0
  62. agno/db/utils.py +2 -0
  63. agno/eval/__init__.py +10 -0
  64. agno/eval/accuracy.py +75 -55
  65. agno/eval/agent_as_judge.py +861 -0
  66. agno/eval/base.py +29 -0
  67. agno/eval/performance.py +16 -7
  68. agno/eval/reliability.py +28 -16
  69. agno/eval/utils.py +35 -17
  70. agno/exceptions.py +27 -2
  71. agno/filters.py +354 -0
  72. agno/guardrails/prompt_injection.py +1 -0
  73. agno/hooks/__init__.py +3 -0
  74. agno/hooks/decorator.py +164 -0
  75. agno/integrations/discord/client.py +1 -1
  76. agno/knowledge/chunking/agentic.py +13 -10
  77. agno/knowledge/chunking/fixed.py +4 -1
  78. agno/knowledge/chunking/semantic.py +9 -4
  79. agno/knowledge/chunking/strategy.py +59 -15
  80. agno/knowledge/embedder/fastembed.py +1 -1
  81. agno/knowledge/embedder/nebius.py +1 -1
  82. agno/knowledge/embedder/ollama.py +8 -0
  83. agno/knowledge/embedder/openai.py +8 -8
  84. agno/knowledge/embedder/sentence_transformer.py +6 -2
  85. agno/knowledge/embedder/vllm.py +262 -0
  86. agno/knowledge/knowledge.py +1618 -318
  87. agno/knowledge/reader/base.py +6 -2
  88. agno/knowledge/reader/csv_reader.py +8 -10
  89. agno/knowledge/reader/docx_reader.py +5 -6
  90. agno/knowledge/reader/field_labeled_csv_reader.py +16 -20
  91. agno/knowledge/reader/json_reader.py +5 -4
  92. agno/knowledge/reader/markdown_reader.py +8 -8
  93. agno/knowledge/reader/pdf_reader.py +17 -19
  94. agno/knowledge/reader/pptx_reader.py +101 -0
  95. agno/knowledge/reader/reader_factory.py +32 -3
  96. agno/knowledge/reader/s3_reader.py +3 -3
  97. agno/knowledge/reader/tavily_reader.py +193 -0
  98. agno/knowledge/reader/text_reader.py +22 -10
  99. agno/knowledge/reader/web_search_reader.py +1 -48
  100. agno/knowledge/reader/website_reader.py +10 -10
  101. agno/knowledge/reader/wikipedia_reader.py +33 -1
  102. agno/knowledge/types.py +1 -0
  103. agno/knowledge/utils.py +72 -7
  104. agno/media.py +22 -6
  105. agno/memory/__init__.py +14 -1
  106. agno/memory/manager.py +544 -83
  107. agno/memory/strategies/__init__.py +15 -0
  108. agno/memory/strategies/base.py +66 -0
  109. agno/memory/strategies/summarize.py +196 -0
  110. agno/memory/strategies/types.py +37 -0
  111. agno/models/aimlapi/aimlapi.py +17 -0
  112. agno/models/anthropic/claude.py +515 -40
  113. agno/models/aws/bedrock.py +102 -21
  114. agno/models/aws/claude.py +131 -274
  115. agno/models/azure/ai_foundry.py +41 -19
  116. agno/models/azure/openai_chat.py +39 -8
  117. agno/models/base.py +1249 -525
  118. agno/models/cerebras/cerebras.py +91 -21
  119. agno/models/cerebras/cerebras_openai.py +21 -2
  120. agno/models/cohere/chat.py +40 -6
  121. agno/models/cometapi/cometapi.py +18 -1
  122. agno/models/dashscope/dashscope.py +2 -3
  123. agno/models/deepinfra/deepinfra.py +18 -1
  124. agno/models/deepseek/deepseek.py +69 -3
  125. agno/models/fireworks/fireworks.py +18 -1
  126. agno/models/google/gemini.py +877 -80
  127. agno/models/google/utils.py +22 -0
  128. agno/models/groq/groq.py +51 -18
  129. agno/models/huggingface/huggingface.py +17 -6
  130. agno/models/ibm/watsonx.py +16 -6
  131. agno/models/internlm/internlm.py +18 -1
  132. agno/models/langdb/langdb.py +13 -1
  133. agno/models/litellm/chat.py +44 -9
  134. agno/models/litellm/litellm_openai.py +18 -1
  135. agno/models/message.py +28 -5
  136. agno/models/meta/llama.py +47 -14
  137. agno/models/meta/llama_openai.py +22 -17
  138. agno/models/mistral/mistral.py +8 -4
  139. agno/models/nebius/nebius.py +6 -7
  140. agno/models/nvidia/nvidia.py +20 -3
  141. agno/models/ollama/chat.py +24 -8
  142. agno/models/openai/chat.py +104 -29
  143. agno/models/openai/responses.py +101 -81
  144. agno/models/openrouter/openrouter.py +60 -3
  145. agno/models/perplexity/perplexity.py +17 -1
  146. agno/models/portkey/portkey.py +7 -6
  147. agno/models/requesty/requesty.py +24 -4
  148. agno/models/response.py +73 -2
  149. agno/models/sambanova/sambanova.py +20 -3
  150. agno/models/siliconflow/siliconflow.py +19 -2
  151. agno/models/together/together.py +20 -3
  152. agno/models/utils.py +254 -8
  153. agno/models/vercel/v0.py +20 -3
  154. agno/models/vertexai/__init__.py +0 -0
  155. agno/models/vertexai/claude.py +190 -0
  156. agno/models/vllm/vllm.py +19 -14
  157. agno/models/xai/xai.py +19 -2
  158. agno/os/app.py +549 -152
  159. agno/os/auth.py +190 -3
  160. agno/os/config.py +23 -0
  161. agno/os/interfaces/a2a/router.py +8 -11
  162. agno/os/interfaces/a2a/utils.py +1 -1
  163. agno/os/interfaces/agui/router.py +18 -3
  164. agno/os/interfaces/agui/utils.py +152 -39
  165. agno/os/interfaces/slack/router.py +55 -37
  166. agno/os/interfaces/slack/slack.py +9 -1
  167. agno/os/interfaces/whatsapp/router.py +0 -1
  168. agno/os/interfaces/whatsapp/security.py +3 -1
  169. agno/os/mcp.py +110 -52
  170. agno/os/middleware/__init__.py +2 -0
  171. agno/os/middleware/jwt.py +676 -112
  172. agno/os/router.py +40 -1478
  173. agno/os/routers/agents/__init__.py +3 -0
  174. agno/os/routers/agents/router.py +599 -0
  175. agno/os/routers/agents/schema.py +261 -0
  176. agno/os/routers/evals/evals.py +96 -39
  177. agno/os/routers/evals/schemas.py +65 -33
  178. agno/os/routers/evals/utils.py +80 -10
  179. agno/os/routers/health.py +10 -4
  180. agno/os/routers/knowledge/knowledge.py +196 -38
  181. agno/os/routers/knowledge/schemas.py +82 -22
  182. agno/os/routers/memory/memory.py +279 -52
  183. agno/os/routers/memory/schemas.py +46 -17
  184. agno/os/routers/metrics/metrics.py +20 -8
  185. agno/os/routers/metrics/schemas.py +16 -16
  186. agno/os/routers/session/session.py +462 -34
  187. agno/os/routers/teams/__init__.py +3 -0
  188. agno/os/routers/teams/router.py +512 -0
  189. agno/os/routers/teams/schema.py +257 -0
  190. agno/os/routers/traces/__init__.py +3 -0
  191. agno/os/routers/traces/schemas.py +414 -0
  192. agno/os/routers/traces/traces.py +499 -0
  193. agno/os/routers/workflows/__init__.py +3 -0
  194. agno/os/routers/workflows/router.py +624 -0
  195. agno/os/routers/workflows/schema.py +75 -0
  196. agno/os/schema.py +256 -693
  197. agno/os/scopes.py +469 -0
  198. agno/os/utils.py +514 -36
  199. agno/reasoning/anthropic.py +80 -0
  200. agno/reasoning/gemini.py +73 -0
  201. agno/reasoning/openai.py +5 -0
  202. agno/reasoning/vertexai.py +76 -0
  203. agno/run/__init__.py +6 -0
  204. agno/run/agent.py +155 -32
  205. agno/run/base.py +55 -3
  206. agno/run/requirement.py +181 -0
  207. agno/run/team.py +125 -38
  208. agno/run/workflow.py +72 -18
  209. agno/session/agent.py +102 -89
  210. agno/session/summary.py +56 -15
  211. agno/session/team.py +164 -90
  212. agno/session/workflow.py +405 -40
  213. agno/table.py +10 -0
  214. agno/team/team.py +3974 -1903
  215. agno/tools/dalle.py +2 -4
  216. agno/tools/eleven_labs.py +23 -25
  217. agno/tools/exa.py +21 -16
  218. agno/tools/file.py +153 -23
  219. agno/tools/file_generation.py +16 -10
  220. agno/tools/firecrawl.py +15 -7
  221. agno/tools/function.py +193 -38
  222. agno/tools/gmail.py +238 -14
  223. agno/tools/google_drive.py +271 -0
  224. agno/tools/googlecalendar.py +36 -8
  225. agno/tools/googlesheets.py +20 -5
  226. agno/tools/jira.py +20 -0
  227. agno/tools/mcp/__init__.py +10 -0
  228. agno/tools/mcp/mcp.py +331 -0
  229. agno/tools/mcp/multi_mcp.py +347 -0
  230. agno/tools/mcp/params.py +24 -0
  231. agno/tools/mcp_toolbox.py +3 -3
  232. agno/tools/models/nebius.py +5 -5
  233. agno/tools/models_labs.py +20 -10
  234. agno/tools/nano_banana.py +151 -0
  235. agno/tools/notion.py +204 -0
  236. agno/tools/parallel.py +314 -0
  237. agno/tools/postgres.py +76 -36
  238. agno/tools/redshift.py +406 -0
  239. agno/tools/scrapegraph.py +1 -1
  240. agno/tools/shopify.py +1519 -0
  241. agno/tools/slack.py +18 -3
  242. agno/tools/spotify.py +919 -0
  243. agno/tools/tavily.py +146 -0
  244. agno/tools/toolkit.py +25 -0
  245. agno/tools/workflow.py +8 -1
  246. agno/tools/yfinance.py +12 -11
  247. agno/tracing/__init__.py +12 -0
  248. agno/tracing/exporter.py +157 -0
  249. agno/tracing/schemas.py +276 -0
  250. agno/tracing/setup.py +111 -0
  251. agno/utils/agent.py +938 -0
  252. agno/utils/cryptography.py +22 -0
  253. agno/utils/dttm.py +33 -0
  254. agno/utils/events.py +151 -3
  255. agno/utils/gemini.py +15 -5
  256. agno/utils/hooks.py +118 -4
  257. agno/utils/http.py +113 -2
  258. agno/utils/knowledge.py +12 -5
  259. agno/utils/log.py +1 -0
  260. agno/utils/mcp.py +92 -2
  261. agno/utils/media.py +187 -1
  262. agno/utils/merge_dict.py +3 -3
  263. agno/utils/message.py +60 -0
  264. agno/utils/models/ai_foundry.py +9 -2
  265. agno/utils/models/claude.py +49 -14
  266. agno/utils/models/cohere.py +9 -2
  267. agno/utils/models/llama.py +9 -2
  268. agno/utils/models/mistral.py +4 -2
  269. agno/utils/print_response/agent.py +109 -16
  270. agno/utils/print_response/team.py +223 -30
  271. agno/utils/print_response/workflow.py +251 -34
  272. agno/utils/streamlit.py +1 -1
  273. agno/utils/team.py +98 -9
  274. agno/utils/tokens.py +657 -0
  275. agno/vectordb/base.py +39 -7
  276. agno/vectordb/cassandra/cassandra.py +21 -5
  277. agno/vectordb/chroma/chromadb.py +43 -12
  278. agno/vectordb/clickhouse/clickhousedb.py +21 -5
  279. agno/vectordb/couchbase/couchbase.py +29 -5
  280. agno/vectordb/lancedb/lance_db.py +92 -181
  281. agno/vectordb/langchaindb/langchaindb.py +24 -4
  282. agno/vectordb/lightrag/lightrag.py +17 -3
  283. agno/vectordb/llamaindex/llamaindexdb.py +25 -5
  284. agno/vectordb/milvus/milvus.py +50 -37
  285. agno/vectordb/mongodb/__init__.py +7 -1
  286. agno/vectordb/mongodb/mongodb.py +36 -30
  287. agno/vectordb/pgvector/pgvector.py +201 -77
  288. agno/vectordb/pineconedb/pineconedb.py +41 -23
  289. agno/vectordb/qdrant/qdrant.py +67 -54
  290. agno/vectordb/redis/__init__.py +9 -0
  291. agno/vectordb/redis/redisdb.py +682 -0
  292. agno/vectordb/singlestore/singlestore.py +50 -29
  293. agno/vectordb/surrealdb/surrealdb.py +31 -41
  294. agno/vectordb/upstashdb/upstashdb.py +34 -6
  295. agno/vectordb/weaviate/weaviate.py +53 -14
  296. agno/workflow/__init__.py +2 -0
  297. agno/workflow/agent.py +299 -0
  298. agno/workflow/condition.py +120 -18
  299. agno/workflow/loop.py +77 -10
  300. agno/workflow/parallel.py +231 -143
  301. agno/workflow/router.py +118 -17
  302. agno/workflow/step.py +609 -170
  303. agno/workflow/steps.py +73 -6
  304. agno/workflow/types.py +96 -21
  305. agno/workflow/workflow.py +2039 -262
  306. {agno-2.1.2.dist-info → agno-2.3.13.dist-info}/METADATA +201 -66
  307. agno-2.3.13.dist-info/RECORD +613 -0
  308. agno/tools/googlesearch.py +0 -98
  309. agno/tools/mcp.py +0 -679
  310. agno/tools/memori.py +0 -339
  311. agno-2.1.2.dist-info/RECORD +0 -543
  312. {agno-2.1.2.dist-info → agno-2.3.13.dist-info}/WHEEL +0 -0
  313. {agno-2.1.2.dist-info → agno-2.3.13.dist-info}/licenses/LICENSE +0 -0
  314. {agno-2.1.2.dist-info → agno-2.3.13.dist-info}/top_level.txt +0 -0
agno/db/json/json_db.py CHANGED
@@ -3,17 +3,22 @@ import os
  import time
  from datetime import date, datetime, timedelta, timezone
  from pathlib import Path
- from typing import Any, Dict, List, Optional, Tuple, Union
+ from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
  from uuid import uuid4

+ if TYPE_CHECKING:
+ from agno.tracing.schemas import Span, Trace
+
  from agno.db.base import BaseDb, SessionType
  from agno.db.json.utils import (
  apply_sorting,
  calculate_date_metrics,
+ deserialize_cultural_knowledge_from_db,
  fetch_all_sessions_data,
  get_dates_to_calculate_metrics_for,
- hydrate_session,
+ serialize_cultural_knowledge_for_db,
  )
+ from agno.db.schemas.culture import CulturalKnowledge
  from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
  from agno.db.schemas.knowledge import KnowledgeRow
  from agno.db.schemas.memory import UserMemory
@@ -27,10 +32,13 @@ class JsonDb(BaseDb):
  self,
  db_path: Optional[str] = None,
  session_table: Optional[str] = None,
+ culture_table: Optional[str] = None,
  memory_table: Optional[str] = None,
  metrics_table: Optional[str] = None,
  eval_table: Optional[str] = None,
  knowledge_table: Optional[str] = None,
+ traces_table: Optional[str] = None,
+ spans_table: Optional[str] = None,
  id: Optional[str] = None,
  ):
  """
@@ -39,10 +47,13 @@ class JsonDb(BaseDb):
  Args:
  db_path (Optional[str]): Path to the directory where JSON files will be stored.
  session_table (Optional[str]): Name of the JSON file to store sessions (without .json extension).
+ culture_table (Optional[str]): Name of the JSON file to store cultural knowledge.
  memory_table (Optional[str]): Name of the JSON file to store memories.
  metrics_table (Optional[str]): Name of the JSON file to store metrics.
  eval_table (Optional[str]): Name of the JSON file to store evaluation runs.
  knowledge_table (Optional[str]): Name of the JSON file to store knowledge content.
+ traces_table (Optional[str]): Name of the JSON file to store run traces.
+ spans_table (Optional[str]): Name of the JSON file to store span events.
  id (Optional[str]): ID of the database.
  """
  if id is None:
@@ -52,15 +63,22 @@ class JsonDb(BaseDb):
  super().__init__(
  id=id,
  session_table=session_table,
+ culture_table=culture_table,
  memory_table=memory_table,
  metrics_table=metrics_table,
  eval_table=eval_table,
  knowledge_table=knowledge_table,
+ traces_table=traces_table,
+ spans_table=spans_table,
  )

  # Create the directory where the JSON files will be stored, if it doesn't exist
  self.db_path = Path(db_path or os.path.join(os.getcwd(), "agno_json_db"))

+ def table_exists(self, table_name: str) -> bool:
+ """JSON implementation, always returns True."""
+ return True
+
  def _read_json_file(self, filename: str, create_table_if_not_found: Optional[bool] = True) -> List[Dict[str, Any]]:
  """Read data from a JSON file, creating it if it doesn't exist.

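For orientation, here is a minimal sketch of how the new constructor arguments shown above might be wired up. Only the parameter names (culture_table, traces_table, spans_table) come from this diff; the directory and file names are illustrative, and per the docstring each *_table value names a JSON file (without the .json extension) under db_path.

from agno.db.json.json_db import JsonDb

# Illustrative file names only; each "table" maps to a JSON file under db_path.
db = JsonDb(
    db_path="./agno_json_db",
    session_table="sessions",
    culture_table="culture",   # new: cultural knowledge entries
    traces_table="traces",     # new: run traces
    spans_table="spans",       # new: span events
)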
@@ -115,6 +133,14 @@ class JsonDb(BaseDb):
  log_error(f"Error writing to the {file_path} JSON file: {e}")
  raise e

+ def get_latest_schema_version(self):
+ """Get the latest version of the database schema."""
+ pass
+
+ def upsert_schema_version(self, version: str) -> None:
+ """Upsert the schema version into the database."""
+ pass
+
  # -- Session methods --

  def delete_session(self, session_id: str) -> bool:
@@ -196,21 +222,16 @@ class JsonDb(BaseDb):
  if session_data.get("session_id") == session_id:
  if user_id is not None and session_data.get("user_id") != user_id:
  continue
- session_type_value = session_type.value if isinstance(session_type, SessionType) else session_type
- if session_data.get("session_type") != session_type_value:
- continue
-
- session = hydrate_session(session_data)

  if not deserialize:
- return session
+ return session_data

  if session_type == SessionType.AGENT:
- return AgentSession.from_dict(session)
+ return AgentSession.from_dict(session_data)
  elif session_type == SessionType.TEAM:
- return TeamSession.from_dict(session)
+ return TeamSession.from_dict(session_data)
  elif session_type == SessionType.WORKFLOW:
- return WorkflowSession.from_dict(session)
+ return WorkflowSession.from_dict(session_data)
  else:
  raise ValueError(f"Invalid session type: {session_type}")

@@ -398,7 +419,7 @@ class JsonDb(BaseDb):
  raise e

  def upsert_sessions(
- self, sessions: List[Session], deserialize: Optional[bool] = True
+ self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
  ) -> List[Union[Session, Dict[str, Any]]]:
  """
  Bulk upsert multiple sessions for improved performance on large datasets.
@@ -621,13 +642,14 @@ class JsonDb(BaseDb):
  raise e

  def get_user_memory_stats(
- self, limit: Optional[int] = None, page: Optional[int] = None
+ self, limit: Optional[int] = None, page: Optional[int] = None, user_id: Optional[str] = None
  ) -> Tuple[List[Dict[str, Any]], int]:
  """Get user memory statistics.

  Args:
  limit (Optional[int]): The maximum number of user stats to return.
  page (Optional[int]): The page number.
+ user_id (Optional[str]): User ID for filtering.

  Returns:
  Tuple[List[Dict[str, Any]], int]: A list of dictionaries containing user stats and total count.
@@ -638,6 +660,9 @@ class JsonDb(BaseDb):

  for memory in memories:
  memory_user_id = memory.get("user_id")
+ # filter by user_id if provided
+ if user_id is not None and memory_user_id != user_id:
+ continue
  if memory_user_id:
  if memory_user_id not in user_stats:
  user_stats[memory_user_id] = {
@@ -703,7 +728,7 @@ class JsonDb(BaseDb):
  raise e

  def upsert_memories(
- self, memories: List[UserMemory], deserialize: Optional[bool] = True
+ self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
  ) -> List[Union[UserMemory, Dict[str, Any]]]:
  """
  Bulk upsert multiple user memories for improved performance on large datasets.
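Both bulk upsert methods gain a preserve_updated_at flag in this diff. Below is a hedged sketch of how a caller might use it; the read helper used to fetch existing memories is assumed for illustration, and the timestamp semantics are inferred from the parameter name only.

from agno.db.json.json_db import JsonDb

db = JsonDb(db_path="./agno_json_db")

# Assumed read helper, not part of this hunk; any source of UserMemory objects works.
memories = db.get_user_memories()

# Re-upsert without stamping rows with the current time (presumed behavior of the flag).
db.upsert_memories(memories, preserve_updated_at=True)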
@@ -1199,3 +1224,554 @@ class JsonDb(BaseDb):
  except Exception as e:
  log_error(f"Error renaming eval run {eval_run_id}: {e}")
  raise e
+
+ # -- Culture methods --
+
+ def clear_cultural_knowledge(self) -> None:
+ """Delete all cultural knowledge from JSON file."""
+ try:
+ self._write_json_file(self.culture_table_name, [])
+ except Exception as e:
+ log_error(f"Error clearing cultural knowledge: {e}")
+ raise e
+
+ def delete_cultural_knowledge(self, id: str) -> None:
+ """Delete a cultural knowledge entry from JSON file."""
+ try:
+ cultural_knowledge = self._read_json_file(self.culture_table_name)
+ cultural_knowledge = [ck for ck in cultural_knowledge if ck.get("id") != id]
+ self._write_json_file(self.culture_table_name, cultural_knowledge)
+ except Exception as e:
+ log_error(f"Error deleting cultural knowledge: {e}")
+ raise e
+
+ def get_cultural_knowledge(
+ self, id: str, deserialize: Optional[bool] = True
+ ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
+ """Get a cultural knowledge entry from JSON file."""
+ try:
+ cultural_knowledge = self._read_json_file(self.culture_table_name)
+ for ck in cultural_knowledge:
+ if ck.get("id") == id:
+ if not deserialize:
+ return ck
+ return deserialize_cultural_knowledge_from_db(ck)
+ return None
+ except Exception as e:
+ log_error(f"Error getting cultural knowledge: {e}")
+ raise e
+
+ def get_all_cultural_knowledge(
+ self,
+ name: Optional[str] = None,
+ agent_id: Optional[str] = None,
+ team_id: Optional[str] = None,
+ limit: Optional[int] = None,
+ page: Optional[int] = None,
+ sort_by: Optional[str] = None,
+ sort_order: Optional[str] = None,
+ deserialize: Optional[bool] = True,
+ ) -> Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
+ """Get all cultural knowledge from JSON file."""
+ try:
+ cultural_knowledge = self._read_json_file(self.culture_table_name)
+
+ # Filter
+ filtered = []
+ for ck in cultural_knowledge:
+ if name and ck.get("name") != name:
+ continue
+ if agent_id and ck.get("agent_id") != agent_id:
+ continue
+ if team_id and ck.get("team_id") != team_id:
+ continue
+ filtered.append(ck)
+
+ # Sort
+ if sort_by:
+ filtered = apply_sorting(filtered, sort_by, sort_order)
+
+ total_count = len(filtered)
+
+ # Paginate
+ if limit and page:
+ start = (page - 1) * limit
+ filtered = filtered[start : start + limit]
+ elif limit:
+ filtered = filtered[:limit]
+
+ if not deserialize:
+ return filtered, total_count
+
+ return [deserialize_cultural_knowledge_from_db(ck) for ck in filtered]
+ except Exception as e:
+ log_error(f"Error getting all cultural knowledge: {e}")
+ raise e
+
+ def upsert_cultural_knowledge(
+ self, cultural_knowledge: CulturalKnowledge, deserialize: Optional[bool] = True
+ ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
+ """Upsert a cultural knowledge entry into JSON file."""
+ try:
+ if not cultural_knowledge.id:
+ cultural_knowledge.id = str(uuid4())
+
+ all_cultural_knowledge = self._read_json_file(self.culture_table_name, create_table_if_not_found=True)
+
+ # Serialize content, categories, and notes into a dict for DB storage
+ content_dict = serialize_cultural_knowledge_for_db(cultural_knowledge)
+
+ # Create the item dict with serialized content
+ ck_dict = {
+ "id": cultural_knowledge.id,
+ "name": cultural_knowledge.name,
+ "summary": cultural_knowledge.summary,
+ "content": content_dict if content_dict else None,
+ "metadata": cultural_knowledge.metadata,
+ "input": cultural_knowledge.input,
+ "created_at": cultural_knowledge.created_at,
+ "updated_at": int(time.time()),
+ "agent_id": cultural_knowledge.agent_id,
+ "team_id": cultural_knowledge.team_id,
+ }
+
+ # Remove existing entry
+ all_cultural_knowledge = [ck for ck in all_cultural_knowledge if ck.get("id") != cultural_knowledge.id]
+
+ # Add new entry
+ all_cultural_knowledge.append(ck_dict)
+
+ self._write_json_file(self.culture_table_name, all_cultural_knowledge)
+
+ return self.get_cultural_knowledge(cultural_knowledge.id, deserialize=deserialize)
+ except Exception as e:
+ log_error(f"Error upserting cultural knowledge: {e}")
+ raise e
+
+ # --- Traces ---
+ def upsert_trace(self, trace: "Trace") -> None:
+ """Create or update a single trace record in the database.
+
+ Args:
+ trace: The Trace object to store (one per trace_id).
+ """
+ try:
+ traces = self._read_json_file(self.trace_table_name, create_table_if_not_found=True)
+
+ # Check if trace exists
+ existing_idx = None
+ for i, existing in enumerate(traces):
+ if existing.get("trace_id") == trace.trace_id:
+ existing_idx = i
+ break
+
+ if existing_idx is not None:
+ existing = traces[existing_idx]
+
+ # workflow (level 3) > team (level 2) > agent (level 1) > child/unknown (level 0)
+ def get_component_level(workflow_id, team_id, agent_id, name):
+ is_root_name = ".run" in name or ".arun" in name
+ if not is_root_name:
+ return 0
+ elif workflow_id:
+ return 3
+ elif team_id:
+ return 2
+ elif agent_id:
+ return 1
+ else:
+ return 0
+
+ existing_level = get_component_level(
+ existing.get("workflow_id"),
+ existing.get("team_id"),
+ existing.get("agent_id"),
+ existing.get("name", ""),
+ )
+ new_level = get_component_level(trace.workflow_id, trace.team_id, trace.agent_id, trace.name)
+ should_update_name = new_level > existing_level
+
+ # Parse existing start_time to calculate correct duration
+ existing_start_time_str = existing.get("start_time")
+ if isinstance(existing_start_time_str, str):
+ existing_start_time = datetime.fromisoformat(existing_start_time_str.replace("Z", "+00:00"))
+ else:
+ existing_start_time = trace.start_time
+
+ recalculated_duration_ms = int((trace.end_time - existing_start_time).total_seconds() * 1000)
+
+ # Update existing trace
+ existing["end_time"] = trace.end_time.isoformat()
+ existing["duration_ms"] = recalculated_duration_ms
+ existing["status"] = trace.status
+ if should_update_name:
+ existing["name"] = trace.name
+
+ # Update context fields only if new value is not None
+ if trace.run_id is not None:
+ existing["run_id"] = trace.run_id
+ if trace.session_id is not None:
+ existing["session_id"] = trace.session_id
+ if trace.user_id is not None:
+ existing["user_id"] = trace.user_id
+ if trace.agent_id is not None:
+ existing["agent_id"] = trace.agent_id
+ if trace.team_id is not None:
+ existing["team_id"] = trace.team_id
+ if trace.workflow_id is not None:
+ existing["workflow_id"] = trace.workflow_id
+
+ traces[existing_idx] = existing
+ else:
+ # Add new trace
+ trace_dict = trace.to_dict()
+ trace_dict.pop("total_spans", None)
+ trace_dict.pop("error_count", None)
+ traces.append(trace_dict)
+
+ self._write_json_file(self.trace_table_name, traces)
+
+ except Exception as e:
+ log_error(f"Error creating trace: {e}")
+
+ def get_trace(
+ self,
+ trace_id: Optional[str] = None,
+ run_id: Optional[str] = None,
+ ):
+ """Get a single trace by trace_id or other filters.
+
+ Args:
+ trace_id: The unique trace identifier.
+ run_id: Filter by run ID (returns first match).
+
+ Returns:
+ Optional[Trace]: The trace if found, None otherwise.
+ """
+ try:
+ from agno.tracing.schemas import Trace
+
+ traces = self._read_json_file(self.trace_table_name, create_table_if_not_found=False)
+ if not traces:
+ return None
+
+ # Get spans for calculating total_spans and error_count
+ spans = self._read_json_file(self.span_table_name, create_table_if_not_found=False)
+
+ # Filter traces
+ filtered = []
+ for t in traces:
+ if trace_id and t.get("trace_id") == trace_id:
+ filtered.append(t)
+ break
+ elif run_id and t.get("run_id") == run_id:
+ filtered.append(t)
+
+ if not filtered:
+ return None
+
+ # Sort by start_time desc and get first
+ filtered.sort(key=lambda x: x.get("start_time", ""), reverse=True)
+ trace_data = filtered[0]
+
+ # Calculate total_spans and error_count
+ trace_spans = [s for s in spans if s.get("trace_id") == trace_data.get("trace_id")]
+ trace_data["total_spans"] = len(trace_spans)
+ trace_data["error_count"] = sum(1 for s in trace_spans if s.get("status_code") == "ERROR")
+
+ return Trace.from_dict(trace_data)
+
+ except Exception as e:
+ log_error(f"Error getting trace: {e}")
+ return None
+
+ def get_traces(
+ self,
+ run_id: Optional[str] = None,
+ session_id: Optional[str] = None,
+ user_id: Optional[str] = None,
+ agent_id: Optional[str] = None,
+ team_id: Optional[str] = None,
+ workflow_id: Optional[str] = None,
+ status: Optional[str] = None,
+ start_time: Optional[datetime] = None,
+ end_time: Optional[datetime] = None,
+ limit: Optional[int] = 20,
+ page: Optional[int] = 1,
+ ) -> tuple[List, int]:
+ """Get traces matching the provided filters with pagination.
+
+ Args:
+ run_id: Filter by run ID.
+ session_id: Filter by session ID.
+ user_id: Filter by user ID.
+ agent_id: Filter by agent ID.
+ team_id: Filter by team ID.
+ workflow_id: Filter by workflow ID.
+ status: Filter by status (OK, ERROR, UNSET).
+ start_time: Filter traces starting after this datetime.
+ end_time: Filter traces ending before this datetime.
+ limit: Maximum number of traces to return per page.
+ page: Page number (1-indexed).
+
+ Returns:
+ tuple[List[Trace], int]: Tuple of (list of matching traces, total count).
+ """
+ try:
+ from agno.tracing.schemas import Trace
+
+ traces = self._read_json_file(self.trace_table_name, create_table_if_not_found=False)
+ if not traces:
+ return [], 0
+
+ # Get spans for calculating total_spans and error_count
+ spans = self._read_json_file(self.span_table_name, create_table_if_not_found=False)
+
+ # Apply filters
+ filtered = []
+ for t in traces:
+ if run_id and t.get("run_id") != run_id:
+ continue
+ if session_id and t.get("session_id") != session_id:
+ continue
+ if user_id and t.get("user_id") != user_id:
+ continue
+ if agent_id and t.get("agent_id") != agent_id:
+ continue
+ if team_id and t.get("team_id") != team_id:
+ continue
+ if workflow_id and t.get("workflow_id") != workflow_id:
+ continue
+ if status and t.get("status") != status:
+ continue
+ if start_time:
+ trace_start = t.get("start_time", "")
+ if trace_start < start_time.isoformat():
+ continue
+ if end_time:
+ trace_end = t.get("end_time", "")
+ if trace_end > end_time.isoformat():
+ continue
+ filtered.append(t)
+
+ total_count = len(filtered)
+
+ # Sort by start_time desc
+ filtered.sort(key=lambda x: x.get("start_time", ""), reverse=True)
+
+ # Apply pagination
+ if limit and page:
+ start_idx = (page - 1) * limit
+ filtered = filtered[start_idx : start_idx + limit]
+
+ # Add total_spans and error_count to each trace
+ result_traces = []
+ for t in filtered:
+ trace_spans = [s for s in spans if s.get("trace_id") == t.get("trace_id")]
+ t["total_spans"] = len(trace_spans)
+ t["error_count"] = sum(1 for s in trace_spans if s.get("status_code") == "ERROR")
+ result_traces.append(Trace.from_dict(t))
+
+ return result_traces, total_count
+
+ except Exception as e:
+ log_error(f"Error getting traces: {e}")
+ return [], 0
+
+ def get_trace_stats(
+ self,
+ user_id: Optional[str] = None,
+ agent_id: Optional[str] = None,
+ team_id: Optional[str] = None,
+ workflow_id: Optional[str] = None,
+ start_time: Optional[datetime] = None,
+ end_time: Optional[datetime] = None,
+ limit: Optional[int] = 20,
+ page: Optional[int] = 1,
+ ) -> tuple[List[Dict[str, Any]], int]:
+ """Get trace statistics grouped by session.
+
+ Args:
+ user_id: Filter by user ID.
+ agent_id: Filter by agent ID.
+ team_id: Filter by team ID.
+ workflow_id: Filter by workflow ID.
+ start_time: Filter sessions with traces created after this datetime.
+ end_time: Filter sessions with traces created before this datetime.
+ limit: Maximum number of sessions to return per page.
+ page: Page number (1-indexed).
+
+ Returns:
+ tuple[List[Dict], int]: Tuple of (list of session stats dicts, total count).
+ """
+ try:
+ traces = self._read_json_file(self.trace_table_name, create_table_if_not_found=False)
+ if not traces:
+ return [], 0
+
+ # Group by session_id
+ session_stats: Dict[str, Dict[str, Any]] = {}
+
+ for t in traces:
+ session_id = t.get("session_id")
+ if not session_id:
+ continue
+
+ # Apply filters
+ if user_id and t.get("user_id") != user_id:
+ continue
+ if agent_id and t.get("agent_id") != agent_id:
+ continue
+ if team_id and t.get("team_id") != team_id:
+ continue
+ if workflow_id and t.get("workflow_id") != workflow_id:
+ continue
+
+ created_at = t.get("created_at", "")
+ if start_time and created_at < start_time.isoformat():
+ continue
+ if end_time and created_at > end_time.isoformat():
+ continue
+
+ if session_id not in session_stats:
+ session_stats[session_id] = {
+ "session_id": session_id,
+ "user_id": t.get("user_id"),
+ "agent_id": t.get("agent_id"),
+ "team_id": t.get("team_id"),
+ "workflow_id": t.get("workflow_id"),
+ "total_traces": 0,
+ "first_trace_at": created_at,
+ "last_trace_at": created_at,
+ }
+
+ session_stats[session_id]["total_traces"] += 1
+ if created_at < session_stats[session_id]["first_trace_at"]:
+ session_stats[session_id]["first_trace_at"] = created_at
+ if created_at > session_stats[session_id]["last_trace_at"]:
+ session_stats[session_id]["last_trace_at"] = created_at
+
+ stats_list = list(session_stats.values())
+ total_count = len(stats_list)
+
+ # Sort by last_trace_at desc
+ stats_list.sort(key=lambda x: x.get("last_trace_at", ""), reverse=True)
+
+ # Apply pagination
+ if limit and page:
+ start_idx = (page - 1) * limit
+ stats_list = stats_list[start_idx : start_idx + limit]
+
+ # Convert ISO strings to datetime objects
+ for stat in stats_list:
+ first_at = stat.get("first_trace_at", "")
+ last_at = stat.get("last_trace_at", "")
+ if first_at:
+ stat["first_trace_at"] = datetime.fromisoformat(first_at.replace("Z", "+00:00"))
+ if last_at:
+ stat["last_trace_at"] = datetime.fromisoformat(last_at.replace("Z", "+00:00"))
+
+ return stats_list, total_count
+
+ except Exception as e:
+ log_error(f"Error getting trace stats: {e}")
+ return [], 0
+
+ # --- Spans ---
+ def create_span(self, span: "Span") -> None:
+ """Create a single span in the database.
+
+ Args:
+ span: The Span object to store.
+ """
+ try:
+ spans = self._read_json_file(self.span_table_name, create_table_if_not_found=True)
+ spans.append(span.to_dict())
+ self._write_json_file(self.span_table_name, spans)
+
+ except Exception as e:
+ log_error(f"Error creating span: {e}")
+
+ def create_spans(self, spans: List) -> None:
+ """Create multiple spans in the database as a batch.
+
+ Args:
+ spans: List of Span objects to store.
+ """
+ if not spans:
+ return
+
+ try:
+ existing_spans = self._read_json_file(self.span_table_name, create_table_if_not_found=True)
+ for span in spans:
+ existing_spans.append(span.to_dict())
+ self._write_json_file(self.span_table_name, existing_spans)
+
+ except Exception as e:
+ log_error(f"Error creating spans batch: {e}")
+
+ def get_span(self, span_id: str):
+ """Get a single span by its span_id.
+
+ Args:
+ span_id: The unique span identifier.
+
+ Returns:
+ Optional[Span]: The span if found, None otherwise.
+ """
+ try:
+ from agno.tracing.schemas import Span
+
+ spans = self._read_json_file(self.span_table_name, create_table_if_not_found=False)
+
+ for s in spans:
+ if s.get("span_id") == span_id:
+ return Span.from_dict(s)
+
+ return None
+
+ except Exception as e:
+ log_error(f"Error getting span: {e}")
+ return None
+
+ def get_spans(
+ self,
+ trace_id: Optional[str] = None,
+ parent_span_id: Optional[str] = None,
+ limit: Optional[int] = 1000,
+ ) -> List:
+ """Get spans matching the provided filters.
+
+ Args:
+ trace_id: Filter by trace ID.
+ parent_span_id: Filter by parent span ID.
+ limit: Maximum number of spans to return.
+
+ Returns:
+ List[Span]: List of matching spans.
+ """
+ try:
+ from agno.tracing.schemas import Span
+
+ spans = self._read_json_file(self.span_table_name, create_table_if_not_found=False)
+ if not spans:
+ return []
+
+ # Apply filters
+ filtered = []
+ for s in spans:
+ if trace_id and s.get("trace_id") != trace_id:
+ continue
+ if parent_span_id and s.get("parent_span_id") != parent_span_id:
+ continue
+ filtered.append(s)
+
+ # Apply limit
+ if limit:
+ filtered = filtered[:limit]
+
+ return [Span.from_dict(s) for s in filtered]
+
+ except Exception as e:
+ log_error(f"Error getting spans: {e}")
+ return []
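To close out, a hedged sketch of how the new trace and span query surface on JsonDb might be exercised. The method names, parameters, and status values come from the signatures and docstrings in this hunk; the filter values are made up, traces are presumably written by the new tracing exporter (agno/tracing/exporter.py in the file list above), and the Trace attributes accessed below mirror the dictionary keys handled in upsert_trace.

from agno.db.json.json_db import JsonDb

db = JsonDb(db_path="./agno_json_db", traces_table="traces", spans_table="spans")

# Page through failed traces for one agent (status is one of OK, ERROR, UNSET).
traces, total = db.get_traces(agent_id="my-agent", status="ERROR", limit=20, page=1)
for trace in traces:
    # Fetch the spans belonging to each trace.
    spans = db.get_spans(trace_id=trace.trace_id)
    print(trace.trace_id, trace.status, len(spans))

# Per-session rollups: total_traces, first_trace_at, last_trace_at.
stats, count = db.get_trace_stats(user_id="user-123", limit=20, page=1)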