agno 2.0.0rc2__py3-none-any.whl → 2.3.0__py3-none-any.whl
This diff shows the changes between publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- agno/agent/agent.py +6009 -2874
- agno/api/api.py +2 -0
- agno/api/os.py +1 -1
- agno/culture/__init__.py +3 -0
- agno/culture/manager.py +956 -0
- agno/db/async_postgres/__init__.py +3 -0
- agno/db/base.py +385 -6
- agno/db/dynamo/dynamo.py +388 -81
- agno/db/dynamo/schemas.py +47 -10
- agno/db/dynamo/utils.py +63 -4
- agno/db/firestore/firestore.py +435 -64
- agno/db/firestore/schemas.py +11 -0
- agno/db/firestore/utils.py +102 -4
- agno/db/gcs_json/gcs_json_db.py +384 -42
- agno/db/gcs_json/utils.py +60 -26
- agno/db/in_memory/in_memory_db.py +351 -66
- agno/db/in_memory/utils.py +60 -2
- agno/db/json/json_db.py +339 -48
- agno/db/json/utils.py +60 -26
- agno/db/migrations/manager.py +199 -0
- agno/db/migrations/v1_to_v2.py +510 -37
- agno/db/migrations/versions/__init__.py +0 -0
- agno/db/migrations/versions/v2_3_0.py +938 -0
- agno/db/mongo/__init__.py +15 -1
- agno/db/mongo/async_mongo.py +2036 -0
- agno/db/mongo/mongo.py +653 -76
- agno/db/mongo/schemas.py +13 -0
- agno/db/mongo/utils.py +80 -8
- agno/db/mysql/mysql.py +687 -25
- agno/db/mysql/schemas.py +61 -37
- agno/db/mysql/utils.py +60 -2
- agno/db/postgres/__init__.py +2 -1
- agno/db/postgres/async_postgres.py +2001 -0
- agno/db/postgres/postgres.py +676 -57
- agno/db/postgres/schemas.py +43 -18
- agno/db/postgres/utils.py +164 -2
- agno/db/redis/redis.py +344 -38
- agno/db/redis/schemas.py +18 -0
- agno/db/redis/utils.py +60 -2
- agno/db/schemas/__init__.py +2 -1
- agno/db/schemas/culture.py +120 -0
- agno/db/schemas/memory.py +13 -0
- agno/db/singlestore/schemas.py +26 -1
- agno/db/singlestore/singlestore.py +687 -53
- agno/db/singlestore/utils.py +60 -2
- agno/db/sqlite/__init__.py +2 -1
- agno/db/sqlite/async_sqlite.py +2371 -0
- agno/db/sqlite/schemas.py +24 -0
- agno/db/sqlite/sqlite.py +774 -85
- agno/db/sqlite/utils.py +168 -5
- agno/db/surrealdb/__init__.py +3 -0
- agno/db/surrealdb/metrics.py +292 -0
- agno/db/surrealdb/models.py +309 -0
- agno/db/surrealdb/queries.py +71 -0
- agno/db/surrealdb/surrealdb.py +1361 -0
- agno/db/surrealdb/utils.py +147 -0
- agno/db/utils.py +50 -22
- agno/eval/accuracy.py +50 -43
- agno/eval/performance.py +6 -3
- agno/eval/reliability.py +6 -3
- agno/eval/utils.py +33 -16
- agno/exceptions.py +68 -1
- agno/filters.py +354 -0
- agno/guardrails/__init__.py +6 -0
- agno/guardrails/base.py +19 -0
- agno/guardrails/openai.py +144 -0
- agno/guardrails/pii.py +94 -0
- agno/guardrails/prompt_injection.py +52 -0
- agno/integrations/discord/client.py +1 -0
- agno/knowledge/chunking/agentic.py +13 -10
- agno/knowledge/chunking/fixed.py +1 -1
- agno/knowledge/chunking/semantic.py +40 -8
- agno/knowledge/chunking/strategy.py +59 -15
- agno/knowledge/embedder/aws_bedrock.py +9 -4
- agno/knowledge/embedder/azure_openai.py +54 -0
- agno/knowledge/embedder/base.py +2 -0
- agno/knowledge/embedder/cohere.py +184 -5
- agno/knowledge/embedder/fastembed.py +1 -1
- agno/knowledge/embedder/google.py +79 -1
- agno/knowledge/embedder/huggingface.py +9 -4
- agno/knowledge/embedder/jina.py +63 -0
- agno/knowledge/embedder/mistral.py +78 -11
- agno/knowledge/embedder/nebius.py +1 -1
- agno/knowledge/embedder/ollama.py +13 -0
- agno/knowledge/embedder/openai.py +37 -65
- agno/knowledge/embedder/sentence_transformer.py +8 -4
- agno/knowledge/embedder/vllm.py +262 -0
- agno/knowledge/embedder/voyageai.py +69 -16
- agno/knowledge/knowledge.py +595 -187
- agno/knowledge/reader/base.py +9 -2
- agno/knowledge/reader/csv_reader.py +8 -10
- agno/knowledge/reader/docx_reader.py +5 -6
- agno/knowledge/reader/field_labeled_csv_reader.py +290 -0
- agno/knowledge/reader/json_reader.py +6 -5
- agno/knowledge/reader/markdown_reader.py +13 -13
- agno/knowledge/reader/pdf_reader.py +43 -68
- agno/knowledge/reader/pptx_reader.py +101 -0
- agno/knowledge/reader/reader_factory.py +51 -6
- agno/knowledge/reader/s3_reader.py +3 -15
- agno/knowledge/reader/tavily_reader.py +194 -0
- agno/knowledge/reader/text_reader.py +13 -13
- agno/knowledge/reader/web_search_reader.py +2 -43
- agno/knowledge/reader/website_reader.py +43 -25
- agno/knowledge/reranker/__init__.py +3 -0
- agno/knowledge/types.py +9 -0
- agno/knowledge/utils.py +20 -0
- agno/media.py +339 -266
- agno/memory/manager.py +336 -82
- agno/models/aimlapi/aimlapi.py +2 -2
- agno/models/anthropic/claude.py +183 -37
- agno/models/aws/bedrock.py +52 -112
- agno/models/aws/claude.py +33 -1
- agno/models/azure/ai_foundry.py +33 -15
- agno/models/azure/openai_chat.py +25 -8
- agno/models/base.py +1011 -566
- agno/models/cerebras/cerebras.py +19 -13
- agno/models/cerebras/cerebras_openai.py +8 -5
- agno/models/cohere/chat.py +27 -1
- agno/models/cometapi/__init__.py +5 -0
- agno/models/cometapi/cometapi.py +57 -0
- agno/models/dashscope/dashscope.py +1 -0
- agno/models/deepinfra/deepinfra.py +2 -2
- agno/models/deepseek/deepseek.py +2 -2
- agno/models/fireworks/fireworks.py +2 -2
- agno/models/google/gemini.py +110 -37
- agno/models/groq/groq.py +28 -11
- agno/models/huggingface/huggingface.py +2 -1
- agno/models/internlm/internlm.py +2 -2
- agno/models/langdb/langdb.py +4 -4
- agno/models/litellm/chat.py +18 -1
- agno/models/litellm/litellm_openai.py +2 -2
- agno/models/llama_cpp/__init__.py +5 -0
- agno/models/llama_cpp/llama_cpp.py +22 -0
- agno/models/message.py +143 -4
- agno/models/meta/llama.py +27 -10
- agno/models/meta/llama_openai.py +5 -17
- agno/models/nebius/nebius.py +6 -6
- agno/models/nexus/__init__.py +3 -0
- agno/models/nexus/nexus.py +22 -0
- agno/models/nvidia/nvidia.py +2 -2
- agno/models/ollama/chat.py +60 -6
- agno/models/openai/chat.py +102 -43
- agno/models/openai/responses.py +103 -106
- agno/models/openrouter/openrouter.py +41 -3
- agno/models/perplexity/perplexity.py +4 -5
- agno/models/portkey/portkey.py +3 -3
- agno/models/requesty/__init__.py +5 -0
- agno/models/requesty/requesty.py +52 -0
- agno/models/response.py +81 -5
- agno/models/sambanova/sambanova.py +2 -2
- agno/models/siliconflow/__init__.py +5 -0
- agno/models/siliconflow/siliconflow.py +25 -0
- agno/models/together/together.py +2 -2
- agno/models/utils.py +254 -8
- agno/models/vercel/v0.py +2 -2
- agno/models/vertexai/__init__.py +0 -0
- agno/models/vertexai/claude.py +96 -0
- agno/models/vllm/vllm.py +1 -0
- agno/models/xai/xai.py +3 -2
- agno/os/app.py +543 -175
- agno/os/auth.py +24 -14
- agno/os/config.py +1 -0
- agno/os/interfaces/__init__.py +1 -0
- agno/os/interfaces/a2a/__init__.py +3 -0
- agno/os/interfaces/a2a/a2a.py +42 -0
- agno/os/interfaces/a2a/router.py +250 -0
- agno/os/interfaces/a2a/utils.py +924 -0
- agno/os/interfaces/agui/agui.py +23 -7
- agno/os/interfaces/agui/router.py +27 -3
- agno/os/interfaces/agui/utils.py +242 -142
- agno/os/interfaces/base.py +6 -2
- agno/os/interfaces/slack/router.py +81 -23
- agno/os/interfaces/slack/slack.py +29 -14
- agno/os/interfaces/whatsapp/router.py +11 -4
- agno/os/interfaces/whatsapp/whatsapp.py +14 -7
- agno/os/mcp.py +111 -54
- agno/os/middleware/__init__.py +7 -0
- agno/os/middleware/jwt.py +233 -0
- agno/os/router.py +556 -139
- agno/os/routers/evals/evals.py +71 -34
- agno/os/routers/evals/schemas.py +31 -31
- agno/os/routers/evals/utils.py +6 -5
- agno/os/routers/health.py +31 -0
- agno/os/routers/home.py +52 -0
- agno/os/routers/knowledge/knowledge.py +185 -38
- agno/os/routers/knowledge/schemas.py +82 -22
- agno/os/routers/memory/memory.py +158 -53
- agno/os/routers/memory/schemas.py +20 -16
- agno/os/routers/metrics/metrics.py +20 -8
- agno/os/routers/metrics/schemas.py +16 -16
- agno/os/routers/session/session.py +499 -38
- agno/os/schema.py +308 -198
- agno/os/utils.py +401 -41
- agno/reasoning/anthropic.py +80 -0
- agno/reasoning/azure_ai_foundry.py +2 -2
- agno/reasoning/deepseek.py +2 -2
- agno/reasoning/default.py +3 -1
- agno/reasoning/gemini.py +73 -0
- agno/reasoning/groq.py +2 -2
- agno/reasoning/ollama.py +2 -2
- agno/reasoning/openai.py +7 -2
- agno/reasoning/vertexai.py +76 -0
- agno/run/__init__.py +6 -0
- agno/run/agent.py +266 -112
- agno/run/base.py +53 -24
- agno/run/team.py +252 -111
- agno/run/workflow.py +156 -45
- agno/session/agent.py +105 -89
- agno/session/summary.py +65 -25
- agno/session/team.py +176 -96
- agno/session/workflow.py +406 -40
- agno/team/team.py +3854 -1692
- agno/tools/brightdata.py +3 -3
- agno/tools/cartesia.py +3 -5
- agno/tools/dalle.py +9 -8
- agno/tools/decorator.py +4 -2
- agno/tools/desi_vocal.py +2 -2
- agno/tools/duckduckgo.py +15 -11
- agno/tools/e2b.py +20 -13
- agno/tools/eleven_labs.py +26 -28
- agno/tools/exa.py +21 -16
- agno/tools/fal.py +4 -4
- agno/tools/file.py +153 -23
- agno/tools/file_generation.py +350 -0
- agno/tools/firecrawl.py +4 -4
- agno/tools/function.py +257 -37
- agno/tools/giphy.py +2 -2
- agno/tools/gmail.py +238 -14
- agno/tools/google_drive.py +270 -0
- agno/tools/googlecalendar.py +36 -8
- agno/tools/googlesheets.py +20 -5
- agno/tools/jira.py +20 -0
- agno/tools/knowledge.py +3 -3
- agno/tools/lumalab.py +3 -3
- agno/tools/mcp/__init__.py +10 -0
- agno/tools/mcp/mcp.py +331 -0
- agno/tools/mcp/multi_mcp.py +347 -0
- agno/tools/mcp/params.py +24 -0
- agno/tools/mcp_toolbox.py +284 -0
- agno/tools/mem0.py +11 -17
- agno/tools/memori.py +1 -53
- agno/tools/memory.py +419 -0
- agno/tools/models/azure_openai.py +2 -2
- agno/tools/models/gemini.py +3 -3
- agno/tools/models/groq.py +3 -5
- agno/tools/models/nebius.py +7 -7
- agno/tools/models_labs.py +25 -15
- agno/tools/notion.py +204 -0
- agno/tools/openai.py +4 -9
- agno/tools/opencv.py +3 -3
- agno/tools/parallel.py +314 -0
- agno/tools/replicate.py +7 -7
- agno/tools/scrapegraph.py +58 -31
- agno/tools/searxng.py +2 -2
- agno/tools/serper.py +2 -2
- agno/tools/slack.py +18 -3
- agno/tools/spider.py +2 -2
- agno/tools/tavily.py +146 -0
- agno/tools/whatsapp.py +1 -1
- agno/tools/workflow.py +278 -0
- agno/tools/yfinance.py +12 -11
- agno/utils/agent.py +820 -0
- agno/utils/audio.py +27 -0
- agno/utils/common.py +90 -1
- agno/utils/events.py +222 -7
- agno/utils/gemini.py +181 -23
- agno/utils/hooks.py +57 -0
- agno/utils/http.py +111 -0
- agno/utils/knowledge.py +12 -5
- agno/utils/log.py +1 -0
- agno/utils/mcp.py +95 -5
- agno/utils/media.py +188 -10
- agno/utils/merge_dict.py +22 -1
- agno/utils/message.py +60 -0
- agno/utils/models/claude.py +40 -11
- agno/utils/models/cohere.py +1 -1
- agno/utils/models/watsonx.py +1 -1
- agno/utils/openai.py +1 -1
- agno/utils/print_response/agent.py +105 -21
- agno/utils/print_response/team.py +103 -38
- agno/utils/print_response/workflow.py +251 -34
- agno/utils/reasoning.py +22 -1
- agno/utils/serialize.py +32 -0
- agno/utils/streamlit.py +16 -10
- agno/utils/string.py +41 -0
- agno/utils/team.py +98 -9
- agno/utils/tools.py +1 -1
- agno/vectordb/base.py +23 -4
- agno/vectordb/cassandra/cassandra.py +65 -9
- agno/vectordb/chroma/chromadb.py +182 -38
- agno/vectordb/clickhouse/clickhousedb.py +64 -11
- agno/vectordb/couchbase/couchbase.py +105 -10
- agno/vectordb/lancedb/lance_db.py +183 -135
- agno/vectordb/langchaindb/langchaindb.py +25 -7
- agno/vectordb/lightrag/lightrag.py +17 -3
- agno/vectordb/llamaindex/__init__.py +3 -0
- agno/vectordb/llamaindex/llamaindexdb.py +46 -7
- agno/vectordb/milvus/milvus.py +126 -9
- agno/vectordb/mongodb/__init__.py +7 -1
- agno/vectordb/mongodb/mongodb.py +112 -7
- agno/vectordb/pgvector/pgvector.py +142 -21
- agno/vectordb/pineconedb/pineconedb.py +80 -8
- agno/vectordb/qdrant/qdrant.py +125 -39
- agno/vectordb/redis/__init__.py +9 -0
- agno/vectordb/redis/redisdb.py +694 -0
- agno/vectordb/singlestore/singlestore.py +111 -25
- agno/vectordb/surrealdb/surrealdb.py +31 -5
- agno/vectordb/upstashdb/upstashdb.py +76 -8
- agno/vectordb/weaviate/weaviate.py +86 -15
- agno/workflow/__init__.py +2 -0
- agno/workflow/agent.py +299 -0
- agno/workflow/condition.py +112 -18
- agno/workflow/loop.py +69 -10
- agno/workflow/parallel.py +266 -118
- agno/workflow/router.py +110 -17
- agno/workflow/step.py +645 -136
- agno/workflow/steps.py +65 -6
- agno/workflow/types.py +71 -33
- agno/workflow/workflow.py +2113 -300
- agno-2.3.0.dist-info/METADATA +618 -0
- agno-2.3.0.dist-info/RECORD +577 -0
- agno-2.3.0.dist-info/licenses/LICENSE +201 -0
- agno/knowledge/reader/url_reader.py +0 -128
- agno/tools/googlesearch.py +0 -98
- agno/tools/mcp.py +0 -610
- agno/utils/models/aws_claude.py +0 -170
- agno-2.0.0rc2.dist-info/METADATA +0 -355
- agno-2.0.0rc2.dist-info/RECORD +0 -515
- agno-2.0.0rc2.dist-info/licenses/LICENSE +0 -375
- {agno-2.0.0rc2.dist-info → agno-2.3.0.dist-info}/WHEEL +0 -0
- {agno-2.0.0rc2.dist-info → agno-2.3.0.dist-info}/top_level.txt +0 -0
agno/db/json/json_db.py
CHANGED
@@ -10,15 +10,18 @@ from agno.db.base import BaseDb, SessionType
 from agno.db.json.utils import (
     apply_sorting,
     calculate_date_metrics,
+    deserialize_cultural_knowledge_from_db,
     fetch_all_sessions_data,
     get_dates_to_calculate_metrics_for,
-
+    serialize_cultural_knowledge_for_db,
 )
+from agno.db.schemas.culture import CulturalKnowledge
 from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
 from agno.db.schemas.knowledge import KnowledgeRow
 from agno.db.schemas.memory import UserMemory
 from agno.session import AgentSession, Session, TeamSession, WorkflowSession
 from agno.utils.log import log_debug, log_error, log_info, log_warning
+from agno.utils.string import generate_id


 class JsonDb(BaseDb):
@@ -26,10 +29,12 @@ class JsonDb(BaseDb):
         self,
         db_path: Optional[str] = None,
         session_table: Optional[str] = None,
+        culture_table: Optional[str] = None,
         memory_table: Optional[str] = None,
         metrics_table: Optional[str] = None,
         eval_table: Optional[str] = None,
         knowledge_table: Optional[str] = None,
+        id: Optional[str] = None,
     ):
         """
         Interface for interacting with JSON files as database.
@@ -37,13 +42,21 @@ class JsonDb(BaseDb):
         Args:
             db_path (Optional[str]): Path to the directory where JSON files will be stored.
             session_table (Optional[str]): Name of the JSON file to store sessions (without .json extension).
+            culture_table (Optional[str]): Name of the JSON file to store cultural knowledge.
             memory_table (Optional[str]): Name of the JSON file to store memories.
             metrics_table (Optional[str]): Name of the JSON file to store metrics.
             eval_table (Optional[str]): Name of the JSON file to store evaluation runs.
             knowledge_table (Optional[str]): Name of the JSON file to store knowledge content.
+            id (Optional[str]): ID of the database.
         """
+        if id is None:
+            seed = db_path or "agno_json_db"
+            id = generate_id(seed)
+
         super().__init__(
+            id=id,
             session_table=session_table,
+            culture_table=culture_table,
             memory_table=memory_table,
             metrics_table=metrics_table,
             eval_table=eval_table,
@@ -53,6 +66,10 @@ class JsonDb(BaseDb):
         # Create the directory where the JSON files will be stored, if it doesn't exist
         self.db_path = Path(db_path or os.path.join(os.getcwd(), "agno_json_db"))

+    def table_exists(self, table_name: str) -> bool:
+        """JSON implementation, always returns True."""
+        return True
+
     def _read_json_file(self, filename: str, create_table_if_not_found: Optional[bool] = True) -> List[Dict[str, Any]]:
         """Read data from a JSON file, creating it if it doesn't exist.

@@ -105,7 +122,15 @@ class JsonDb(BaseDb):

         except Exception as e:
             log_error(f"Error writing to the {file_path} JSON file: {e}")
-
+            raise e
+
+    def get_latest_schema_version(self):
+        """Get the latest version of the database schema."""
+        pass
+
+    def upsert_schema_version(self, version: str) -> None:
+        """Upsert the schema version into the database."""
+        pass

     # -- Session methods --

@@ -137,7 +162,7 @@ class JsonDb(BaseDb):

         except Exception as e:
             log_error(f"Error deleting session: {e}")
-
+            raise e

     def delete_sessions(self, session_ids: List[str]) -> None:
         """Delete multiple sessions from the JSON file.
@@ -156,11 +181,12 @@ class JsonDb(BaseDb):

         except Exception as e:
             log_error(f"Error deleting sessions: {e}")
+            raise e

     def get_session(
         self,
         session_id: str,
-        session_type:
+        session_type: SessionType,
         user_id: Optional[str] = None,
         deserialize: Optional[bool] = True,
     ) -> Optional[Union[AgentSession, TeamSession, WorkflowSession, Dict[str, Any]]]:
@@ -168,7 +194,7 @@ class JsonDb(BaseDb):

         Args:
             session_id (str): The ID of the session to read.
-            session_type (
+            session_type (SessionType): The type of the session to read.
             user_id (Optional[str]): The ID of the user to read the session for.
             deserialize (Optional[bool]): Whether to deserialize the session.

@@ -187,27 +213,24 @@ class JsonDb(BaseDb):
                 if session_data.get("session_id") == session_id:
                     if user_id is not None and session_data.get("user_id") != user_id:
                         continue
-                    session_type_value = session_type.value if isinstance(session_type, SessionType) else session_type
-                    if session_data.get("session_type") != session_type_value:
-                        continue
-
-                    session = hydrate_session(session_data)

                     if not deserialize:
-                        return
+                        return session_data

                     if session_type == SessionType.AGENT:
-                        return AgentSession.from_dict(
+                        return AgentSession.from_dict(session_data)
                     elif session_type == SessionType.TEAM:
-                        return TeamSession.from_dict(
+                        return TeamSession.from_dict(session_data)
+                    elif session_type == SessionType.WORKFLOW:
+                        return WorkflowSession.from_dict(session_data)
                     else:
-
+                        raise ValueError(f"Invalid session type: {session_type}")

             return None

         except Exception as e:
             log_error(f"Exception reading from session file: {e}")
-
+            raise e

     def get_sessions(
         self,
@@ -302,7 +325,7 @@ class JsonDb(BaseDb):

         except Exception as e:
             log_error(f"Exception reading from session file: {e}")
-
+            raise e

     def rename_session(
         self, session_id: str, session_type: SessionType, session_name: str, deserialize: Optional[bool] = True
@@ -330,14 +353,16 @@ class JsonDb(BaseDb):
                         return AgentSession.from_dict(session)
                     elif session_type == SessionType.TEAM:
                         return TeamSession.from_dict(session)
-
+                    elif session_type == SessionType.WORKFLOW:
                         return WorkflowSession.from_dict(session)
+                    else:
+                        raise ValueError(f"Invalid session type: {session_type}")

             return None

         except Exception as e:
             log_error(f"Exception renaming session: {e}")
-
+            raise e

     def upsert_session(
         self, session: Session, deserialize: Optional[bool] = True
@@ -382,7 +407,44 @@ class JsonDb(BaseDb):

         except Exception as e:
             log_error(f"Exception upserting session: {e}")
-
+            raise e
+
+    def upsert_sessions(
+        self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
+    ) -> List[Union[Session, Dict[str, Any]]]:
+        """
+        Bulk upsert multiple sessions for improved performance on large datasets.
+
+        Args:
+            sessions (List[Session]): List of sessions to upsert.
+            deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
+
+        Returns:
+            List[Union[Session, Dict[str, Any]]]: List of upserted sessions.
+
+        Raises:
+            Exception: If an error occurs during bulk upsert.
+        """
+        if not sessions:
+            return []
+
+        try:
+            log_info(
+                f"JsonDb doesn't support efficient bulk operations, falling back to individual upserts for {len(sessions)} sessions"
+            )
+
+            # Fall back to individual upserts
+            results = []
+            for session in sessions:
+                if session is not None:
+                    result = self.upsert_session(session, deserialize=deserialize)
+                    if result is not None:
+                        results.append(result)
+            return results
+
+        except Exception as e:
+            log_error(f"Exception during bulk session upsert: {e}")
+            return []

     def _matches_session_key(self, existing_session: Dict[str, Any], session: Session) -> bool:
         """Check if existing session matches the key for the session type."""
@@ -395,11 +457,29 @@ class JsonDb(BaseDb):
         return False

     # -- Memory methods --
-    def delete_user_memory(self, memory_id: str):
-        """Delete a user memory from the JSON file.
+    def delete_user_memory(self, memory_id: str, user_id: Optional[str] = None):
+        """Delete a user memory from the JSON file.
+
+        Args:
+            memory_id (str): The ID of the memory to delete.
+            user_id (Optional[str]): The ID of the user (optional, for filtering).
+        """
         try:
             memories = self._read_json_file(self.memory_table_name)
             original_count = len(memories)
+
+            # If user_id is provided, verify the memory belongs to the user before deleting
+            if user_id:
+                memory_to_delete = None
+                for m in memories:
+                    if m.get("memory_id") == memory_id:
+                        memory_to_delete = m
+                        break
+
+                if memory_to_delete and memory_to_delete.get("user_id") != user_id:
+                    log_debug(f"Memory {memory_id} does not belong to user {user_id}")
+                    return
+
             memories = [m for m in memories if m.get("memory_id") != memory_id]

             if len(memories) < original_count:
@@ -410,11 +490,26 @@ class JsonDb(BaseDb):

         except Exception as e:
             log_error(f"Error deleting memory: {e}")
+            raise e
+
+    def delete_user_memories(self, memory_ids: List[str], user_id: Optional[str] = None) -> None:
+        """Delete multiple user memories from the JSON file.

-
-
+        Args:
+            memory_ids (List[str]): List of memory IDs to delete.
+            user_id (Optional[str]): The ID of the user (optional, for filtering).
+        """
         try:
             memories = self._read_json_file(self.memory_table_name)
+
+            # If user_id is provided, filter memory_ids to only those belonging to the user
+            if user_id:
+                filtered_memory_ids: List[str] = []
+                for memory in memories:
+                    if memory.get("memory_id") in memory_ids and memory.get("user_id") == user_id:
+                        filtered_memory_ids.append(memory.get("memory_id"))  # type: ignore
+                memory_ids = filtered_memory_ids
+
             memories = [m for m in memories if m.get("memory_id") not in memory_ids]
             self._write_json_file(self.memory_table_name, memories)

@@ -422,9 +517,14 @@ class JsonDb(BaseDb):

         except Exception as e:
             log_error(f"Error deleting memories: {e}")
+            raise e

     def get_all_memory_topics(self) -> List[str]:
-        """Get all memory topics from the JSON file.
+        """Get all memory topics from the JSON file.
+
+        Returns:
+            List[str]: List of unique memory topics.
+        """
         try:
             memories = self._read_json_file(self.memory_table_name)

@@ -437,17 +537,33 @@ class JsonDb(BaseDb):

         except Exception as e:
             log_error(f"Exception reading from memory file: {e}")
-
+            raise e

     def get_user_memory(
-        self,
+        self,
+        memory_id: str,
+        deserialize: Optional[bool] = True,
+        user_id: Optional[str] = None,
     ) -> Optional[Union[UserMemory, Dict[str, Any]]]:
-        """Get a memory from the JSON file.
+        """Get a memory from the JSON file.
+
+        Args:
+            memory_id (str): The ID of the memory to get.
+            deserialize (Optional[bool]): Whether to deserialize the memory.
+            user_id (Optional[str]): The ID of the user (optional, for filtering).
+
+        Returns:
+            Optional[Union[UserMemory, Dict[str, Any]]]: The user memory data if found, None otherwise.
+        """
         try:
             memories = self._read_json_file(self.memory_table_name)

             for memory_data in memories:
                 if memory_data.get("memory_id") == memory_id:
+                    # Filter by user_id if provided
+                    if user_id and memory_data.get("user_id") != user_id:
+                        return None
+
                     if not deserialize:
                         return memory_data
                     return UserMemory.from_dict(memory_data)
@@ -456,7 +572,7 @@ class JsonDb(BaseDb):

         except Exception as e:
             log_error(f"Exception reading from memory file: {e}")
-
+            raise e

     def get_user_memories(
         self,
@@ -514,25 +630,37 @@ class JsonDb(BaseDb):

         except Exception as e:
             log_error(f"Exception reading from memory file: {e}")
-
+            raise e

     def get_user_memory_stats(
         self, limit: Optional[int] = None, page: Optional[int] = None
     ) -> Tuple[List[Dict[str, Any]], int]:
-        """Get user memory statistics.
+        """Get user memory statistics.
+
+        Args:
+            limit (Optional[int]): The maximum number of user stats to return.
+            page (Optional[int]): The page number.
+
+        Returns:
+            Tuple[List[Dict[str, Any]], int]: A list of dictionaries containing user stats and total count.
+        """
         try:
             memories = self._read_json_file(self.memory_table_name)
             user_stats = {}

             for memory in memories:
-
-                if
-                if
-                    user_stats[
-
+                memory_user_id = memory.get("user_id")
+                if memory_user_id:
+                    if memory_user_id not in user_stats:
+                        user_stats[memory_user_id] = {
+                            "user_id": memory_user_id,
+                            "total_memories": 0,
+                            "last_memory_updated_at": 0,
+                        }
+                    user_stats[memory_user_id]["total_memories"] += 1
                     updated_at = memory.get("updated_at", 0)
-                if updated_at > user_stats[
-                    user_stats[
+                    if updated_at > user_stats[memory_user_id]["last_memory_updated_at"]:
+                        user_stats[memory_user_id]["last_memory_updated_at"] = updated_at

             stats_list = list(user_stats.values())
             stats_list.sort(key=lambda x: x["last_memory_updated_at"], reverse=True)
@@ -550,7 +678,7 @@ class JsonDb(BaseDb):

         except Exception as e:
             log_error(f"Exception getting user memory stats: {e}")
-
+            raise e

     def upsert_user_memory(
         self, memory: UserMemory, deserialize: Optional[bool] = True
@@ -584,7 +712,43 @@ class JsonDb(BaseDb):

         except Exception as e:
             log_warning(f"Exception upserting user memory: {e}")
-
+            raise e
+
+    def upsert_memories(
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
+    ) -> List[Union[UserMemory, Dict[str, Any]]]:
+        """
+        Bulk upsert multiple user memories for improved performance on large datasets.
+
+        Args:
+            memories (List[UserMemory]): List of memories to upsert.
+            deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.
+
+        Returns:
+            List[Union[UserMemory, Dict[str, Any]]]: List of upserted memories.
+
+        Raises:
+            Exception: If an error occurs during bulk upsert.
+        """
+        if not memories:
+            return []
+
+        try:
+            log_info(
+                f"JsonDb doesn't support efficient bulk operations, falling back to individual upserts for {len(memories)} memories"
+            )
+            # Fall back to individual upserts
+            results = []
+            for memory in memories:
+                if memory is not None:
+                    result = self.upsert_user_memory(memory, deserialize=deserialize)
+                    if result is not None:
+                        results.append(result)
+            return results
+
+        except Exception as e:
+            log_error(f"Exception during bulk memory upsert: {e}")
+            return []

     def clear_memories(self) -> None:
         """Delete all memories from the database.
@@ -598,6 +762,7 @@ class JsonDb(BaseDb):

         except Exception as e:
             log_warning(f"Exception deleting all memories: {e}")
+            raise e

     # -- Metrics methods --
     def calculate_metrics(self) -> Optional[list[dict]]:
@@ -666,7 +831,7 @@ class JsonDb(BaseDb):

         except Exception as e:
             log_warning(f"Exception refreshing metrics: {e}")
-
+            raise e

     def _get_metrics_calculation_starting_date(self, metrics: List[Dict[str, Any]]) -> Optional[date]:
         """Get the first date for which metrics calculation is needed."""
@@ -721,7 +886,7 @@ class JsonDb(BaseDb):

         except Exception as e:
             log_error(f"Exception reading sessions for metrics: {e}")
-
+            raise e

     def get_metrics(
         self,
@@ -753,7 +918,7 @@ class JsonDb(BaseDb):

         except Exception as e:
             log_error(f"Exception getting metrics: {e}")
-
+            raise e

     # -- Knowledge methods --

@@ -773,6 +938,7 @@ class JsonDb(BaseDb):

         except Exception as e:
             log_error(f"Error deleting knowledge content: {e}")
+            raise e

     def get_knowledge_content(self, id: str) -> Optional[KnowledgeRow]:
         """Get a knowledge row from the database.
@@ -797,7 +963,7 @@ class JsonDb(BaseDb):

         except Exception as e:
             log_error(f"Error getting knowledge content: {e}")
-
+            raise e

     def get_knowledge_contents(
         self,
@@ -839,7 +1005,7 @@ class JsonDb(BaseDb):

         except Exception as e:
             log_error(f"Error getting knowledge contents: {e}")
-
+            raise e

     def upsert_knowledge_content(self, knowledge_row: KnowledgeRow):
         """Upsert knowledge content in the database.
@@ -874,7 +1040,7 @@ class JsonDb(BaseDb):

         except Exception as e:
             log_error(f"Error upserting knowledge row: {e}")
-
+            raise e

     # -- Eval methods --

@@ -897,7 +1063,7 @@ class JsonDb(BaseDb):

         except Exception as e:
             log_error(f"Error creating eval run: {e}")
-
+            raise e

     def delete_eval_run(self, eval_run_id: str) -> None:
         """Delete an eval run from the JSON file."""
@@ -914,6 +1080,7 @@ class JsonDb(BaseDb):

         except Exception as e:
             log_error(f"Error deleting eval run {eval_run_id}: {e}")
+            raise e

     def delete_eval_runs(self, eval_run_ids: List[str]) -> None:
         """Delete multiple eval runs from the JSON file."""
@@ -931,6 +1098,7 @@ class JsonDb(BaseDb):

         except Exception as e:
             log_error(f"Error deleting eval runs {eval_run_ids}: {e}")
+            raise e

     def get_eval_run(
         self, eval_run_id: str, deserialize: Optional[bool] = True
@@ -949,7 +1117,7 @@ class JsonDb(BaseDb):

         except Exception as e:
             log_error(f"Exception getting eval run {eval_run_id}: {e}")
-
+            raise e

     def get_eval_runs(
         self,
@@ -1015,7 +1183,7 @@ class JsonDb(BaseDb):

         except Exception as e:
             log_error(f"Exception getting eval runs: {e}")
-
+            raise e

     def rename_eval_run(
         self, eval_run_id: str, name: str, deserialize: Optional[bool] = True
@@ -1042,4 +1210,127 @@ class JsonDb(BaseDb):

         except Exception as e:
             log_error(f"Error renaming eval run {eval_run_id}: {e}")
+            raise e
+
+    # -- Culture methods --
+
+    def clear_cultural_knowledge(self) -> None:
+        """Delete all cultural knowledge from JSON file."""
+        try:
+            self._write_json_file(self.culture_table_name, [])
+        except Exception as e:
+            log_error(f"Error clearing cultural knowledge: {e}")
+            raise e
+
+    def delete_cultural_knowledge(self, id: str) -> None:
+        """Delete a cultural knowledge entry from JSON file."""
+        try:
+            cultural_knowledge = self._read_json_file(self.culture_table_name)
+            cultural_knowledge = [ck for ck in cultural_knowledge if ck.get("id") != id]
+            self._write_json_file(self.culture_table_name, cultural_knowledge)
+        except Exception as e:
+            log_error(f"Error deleting cultural knowledge: {e}")
+            raise e
+
+    def get_cultural_knowledge(
+        self, id: str, deserialize: Optional[bool] = True
+    ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
+        """Get a cultural knowledge entry from JSON file."""
+        try:
+            cultural_knowledge = self._read_json_file(self.culture_table_name)
+            for ck in cultural_knowledge:
+                if ck.get("id") == id:
+                    if not deserialize:
+                        return ck
+                    return deserialize_cultural_knowledge_from_db(ck)
             return None
+        except Exception as e:
+            log_error(f"Error getting cultural knowledge: {e}")
+            raise e
+
+    def get_all_cultural_knowledge(
+        self,
+        name: Optional[str] = None,
+        agent_id: Optional[str] = None,
+        team_id: Optional[str] = None,
+        limit: Optional[int] = None,
+        page: Optional[int] = None,
+        sort_by: Optional[str] = None,
+        sort_order: Optional[str] = None,
+        deserialize: Optional[bool] = True,
+    ) -> Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
+        """Get all cultural knowledge from JSON file."""
+        try:
+            cultural_knowledge = self._read_json_file(self.culture_table_name)
+
+            # Filter
+            filtered = []
+            for ck in cultural_knowledge:
+                if name and ck.get("name") != name:
+                    continue
+                if agent_id and ck.get("agent_id") != agent_id:
+                    continue
+                if team_id and ck.get("team_id") != team_id:
+                    continue
+                filtered.append(ck)
+
+            # Sort
+            if sort_by:
+                filtered = apply_sorting(filtered, sort_by, sort_order)
+
+            total_count = len(filtered)
+
+            # Paginate
+            if limit and page:
+                start = (page - 1) * limit
+                filtered = filtered[start : start + limit]
+            elif limit:
+                filtered = filtered[:limit]
+
+            if not deserialize:
+                return filtered, total_count
+
+            return [deserialize_cultural_knowledge_from_db(ck) for ck in filtered]
+        except Exception as e:
+            log_error(f"Error getting all cultural knowledge: {e}")
+            raise e
+
+    def upsert_cultural_knowledge(
+        self, cultural_knowledge: CulturalKnowledge, deserialize: Optional[bool] = True
+    ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
+        """Upsert a cultural knowledge entry into JSON file."""
+        try:
+            if not cultural_knowledge.id:
+                cultural_knowledge.id = str(uuid4())
+
+            all_cultural_knowledge = self._read_json_file(self.culture_table_name, create_table_if_not_found=True)
+
+            # Serialize content, categories, and notes into a dict for DB storage
+            content_dict = serialize_cultural_knowledge_for_db(cultural_knowledge)
+
+            # Create the item dict with serialized content
+            ck_dict = {
+                "id": cultural_knowledge.id,
+                "name": cultural_knowledge.name,
+                "summary": cultural_knowledge.summary,
+                "content": content_dict if content_dict else None,
+                "metadata": cultural_knowledge.metadata,
+                "input": cultural_knowledge.input,
+                "created_at": cultural_knowledge.created_at,
+                "updated_at": int(time.time()),
+                "agent_id": cultural_knowledge.agent_id,
+                "team_id": cultural_knowledge.team_id,
+            }
+
+            # Remove existing entry
+            all_cultural_knowledge = [ck for ck in all_cultural_knowledge if ck.get("id") != cultural_knowledge.id]
+
+            # Add new entry
+            all_cultural_knowledge.append(ck_dict)
+
+            self._write_json_file(self.culture_table_name, all_cultural_knowledge)
+
+            return self.get_cultural_knowledge(cultural_knowledge.id, deserialize=deserialize)
+        except Exception as e:
+            log_error(f"Error upserting cultural knowledge: {e}")
+            raise e