agno 2.0.0rc2__py3-none-any.whl → 2.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/agent.py +6009 -2874
- agno/api/api.py +2 -0
- agno/api/os.py +1 -1
- agno/culture/__init__.py +3 -0
- agno/culture/manager.py +956 -0
- agno/db/async_postgres/__init__.py +3 -0
- agno/db/base.py +385 -6
- agno/db/dynamo/dynamo.py +388 -81
- agno/db/dynamo/schemas.py +47 -10
- agno/db/dynamo/utils.py +63 -4
- agno/db/firestore/firestore.py +435 -64
- agno/db/firestore/schemas.py +11 -0
- agno/db/firestore/utils.py +102 -4
- agno/db/gcs_json/gcs_json_db.py +384 -42
- agno/db/gcs_json/utils.py +60 -26
- agno/db/in_memory/in_memory_db.py +351 -66
- agno/db/in_memory/utils.py +60 -2
- agno/db/json/json_db.py +339 -48
- agno/db/json/utils.py +60 -26
- agno/db/migrations/manager.py +199 -0
- agno/db/migrations/v1_to_v2.py +510 -37
- agno/db/migrations/versions/__init__.py +0 -0
- agno/db/migrations/versions/v2_3_0.py +938 -0
- agno/db/mongo/__init__.py +15 -1
- agno/db/mongo/async_mongo.py +2036 -0
- agno/db/mongo/mongo.py +653 -76
- agno/db/mongo/schemas.py +13 -0
- agno/db/mongo/utils.py +80 -8
- agno/db/mysql/mysql.py +687 -25
- agno/db/mysql/schemas.py +61 -37
- agno/db/mysql/utils.py +60 -2
- agno/db/postgres/__init__.py +2 -1
- agno/db/postgres/async_postgres.py +2001 -0
- agno/db/postgres/postgres.py +676 -57
- agno/db/postgres/schemas.py +43 -18
- agno/db/postgres/utils.py +164 -2
- agno/db/redis/redis.py +344 -38
- agno/db/redis/schemas.py +18 -0
- agno/db/redis/utils.py +60 -2
- agno/db/schemas/__init__.py +2 -1
- agno/db/schemas/culture.py +120 -0
- agno/db/schemas/memory.py +13 -0
- agno/db/singlestore/schemas.py +26 -1
- agno/db/singlestore/singlestore.py +687 -53
- agno/db/singlestore/utils.py +60 -2
- agno/db/sqlite/__init__.py +2 -1
- agno/db/sqlite/async_sqlite.py +2371 -0
- agno/db/sqlite/schemas.py +24 -0
- agno/db/sqlite/sqlite.py +774 -85
- agno/db/sqlite/utils.py +168 -5
- agno/db/surrealdb/__init__.py +3 -0
- agno/db/surrealdb/metrics.py +292 -0
- agno/db/surrealdb/models.py +309 -0
- agno/db/surrealdb/queries.py +71 -0
- agno/db/surrealdb/surrealdb.py +1361 -0
- agno/db/surrealdb/utils.py +147 -0
- agno/db/utils.py +50 -22
- agno/eval/accuracy.py +50 -43
- agno/eval/performance.py +6 -3
- agno/eval/reliability.py +6 -3
- agno/eval/utils.py +33 -16
- agno/exceptions.py +68 -1
- agno/filters.py +354 -0
- agno/guardrails/__init__.py +6 -0
- agno/guardrails/base.py +19 -0
- agno/guardrails/openai.py +144 -0
- agno/guardrails/pii.py +94 -0
- agno/guardrails/prompt_injection.py +52 -0
- agno/integrations/discord/client.py +1 -0
- agno/knowledge/chunking/agentic.py +13 -10
- agno/knowledge/chunking/fixed.py +1 -1
- agno/knowledge/chunking/semantic.py +40 -8
- agno/knowledge/chunking/strategy.py +59 -15
- agno/knowledge/embedder/aws_bedrock.py +9 -4
- agno/knowledge/embedder/azure_openai.py +54 -0
- agno/knowledge/embedder/base.py +2 -0
- agno/knowledge/embedder/cohere.py +184 -5
- agno/knowledge/embedder/fastembed.py +1 -1
- agno/knowledge/embedder/google.py +79 -1
- agno/knowledge/embedder/huggingface.py +9 -4
- agno/knowledge/embedder/jina.py +63 -0
- agno/knowledge/embedder/mistral.py +78 -11
- agno/knowledge/embedder/nebius.py +1 -1
- agno/knowledge/embedder/ollama.py +13 -0
- agno/knowledge/embedder/openai.py +37 -65
- agno/knowledge/embedder/sentence_transformer.py +8 -4
- agno/knowledge/embedder/vllm.py +262 -0
- agno/knowledge/embedder/voyageai.py +69 -16
- agno/knowledge/knowledge.py +595 -187
- agno/knowledge/reader/base.py +9 -2
- agno/knowledge/reader/csv_reader.py +8 -10
- agno/knowledge/reader/docx_reader.py +5 -6
- agno/knowledge/reader/field_labeled_csv_reader.py +290 -0
- agno/knowledge/reader/json_reader.py +6 -5
- agno/knowledge/reader/markdown_reader.py +13 -13
- agno/knowledge/reader/pdf_reader.py +43 -68
- agno/knowledge/reader/pptx_reader.py +101 -0
- agno/knowledge/reader/reader_factory.py +51 -6
- agno/knowledge/reader/s3_reader.py +3 -15
- agno/knowledge/reader/tavily_reader.py +194 -0
- agno/knowledge/reader/text_reader.py +13 -13
- agno/knowledge/reader/web_search_reader.py +2 -43
- agno/knowledge/reader/website_reader.py +43 -25
- agno/knowledge/reranker/__init__.py +3 -0
- agno/knowledge/types.py +9 -0
- agno/knowledge/utils.py +20 -0
- agno/media.py +339 -266
- agno/memory/manager.py +336 -82
- agno/models/aimlapi/aimlapi.py +2 -2
- agno/models/anthropic/claude.py +183 -37
- agno/models/aws/bedrock.py +52 -112
- agno/models/aws/claude.py +33 -1
- agno/models/azure/ai_foundry.py +33 -15
- agno/models/azure/openai_chat.py +25 -8
- agno/models/base.py +1011 -566
- agno/models/cerebras/cerebras.py +19 -13
- agno/models/cerebras/cerebras_openai.py +8 -5
- agno/models/cohere/chat.py +27 -1
- agno/models/cometapi/__init__.py +5 -0
- agno/models/cometapi/cometapi.py +57 -0
- agno/models/dashscope/dashscope.py +1 -0
- agno/models/deepinfra/deepinfra.py +2 -2
- agno/models/deepseek/deepseek.py +2 -2
- agno/models/fireworks/fireworks.py +2 -2
- agno/models/google/gemini.py +110 -37
- agno/models/groq/groq.py +28 -11
- agno/models/huggingface/huggingface.py +2 -1
- agno/models/internlm/internlm.py +2 -2
- agno/models/langdb/langdb.py +4 -4
- agno/models/litellm/chat.py +18 -1
- agno/models/litellm/litellm_openai.py +2 -2
- agno/models/llama_cpp/__init__.py +5 -0
- agno/models/llama_cpp/llama_cpp.py +22 -0
- agno/models/message.py +143 -4
- agno/models/meta/llama.py +27 -10
- agno/models/meta/llama_openai.py +5 -17
- agno/models/nebius/nebius.py +6 -6
- agno/models/nexus/__init__.py +3 -0
- agno/models/nexus/nexus.py +22 -0
- agno/models/nvidia/nvidia.py +2 -2
- agno/models/ollama/chat.py +60 -6
- agno/models/openai/chat.py +102 -43
- agno/models/openai/responses.py +103 -106
- agno/models/openrouter/openrouter.py +41 -3
- agno/models/perplexity/perplexity.py +4 -5
- agno/models/portkey/portkey.py +3 -3
- agno/models/requesty/__init__.py +5 -0
- agno/models/requesty/requesty.py +52 -0
- agno/models/response.py +81 -5
- agno/models/sambanova/sambanova.py +2 -2
- agno/models/siliconflow/__init__.py +5 -0
- agno/models/siliconflow/siliconflow.py +25 -0
- agno/models/together/together.py +2 -2
- agno/models/utils.py +254 -8
- agno/models/vercel/v0.py +2 -2
- agno/models/vertexai/__init__.py +0 -0
- agno/models/vertexai/claude.py +96 -0
- agno/models/vllm/vllm.py +1 -0
- agno/models/xai/xai.py +3 -2
- agno/os/app.py +543 -175
- agno/os/auth.py +24 -14
- agno/os/config.py +1 -0
- agno/os/interfaces/__init__.py +1 -0
- agno/os/interfaces/a2a/__init__.py +3 -0
- agno/os/interfaces/a2a/a2a.py +42 -0
- agno/os/interfaces/a2a/router.py +250 -0
- agno/os/interfaces/a2a/utils.py +924 -0
- agno/os/interfaces/agui/agui.py +23 -7
- agno/os/interfaces/agui/router.py +27 -3
- agno/os/interfaces/agui/utils.py +242 -142
- agno/os/interfaces/base.py +6 -2
- agno/os/interfaces/slack/router.py +81 -23
- agno/os/interfaces/slack/slack.py +29 -14
- agno/os/interfaces/whatsapp/router.py +11 -4
- agno/os/interfaces/whatsapp/whatsapp.py +14 -7
- agno/os/mcp.py +111 -54
- agno/os/middleware/__init__.py +7 -0
- agno/os/middleware/jwt.py +233 -0
- agno/os/router.py +556 -139
- agno/os/routers/evals/evals.py +71 -34
- agno/os/routers/evals/schemas.py +31 -31
- agno/os/routers/evals/utils.py +6 -5
- agno/os/routers/health.py +31 -0
- agno/os/routers/home.py +52 -0
- agno/os/routers/knowledge/knowledge.py +185 -38
- agno/os/routers/knowledge/schemas.py +82 -22
- agno/os/routers/memory/memory.py +158 -53
- agno/os/routers/memory/schemas.py +20 -16
- agno/os/routers/metrics/metrics.py +20 -8
- agno/os/routers/metrics/schemas.py +16 -16
- agno/os/routers/session/session.py +499 -38
- agno/os/schema.py +308 -198
- agno/os/utils.py +401 -41
- agno/reasoning/anthropic.py +80 -0
- agno/reasoning/azure_ai_foundry.py +2 -2
- agno/reasoning/deepseek.py +2 -2
- agno/reasoning/default.py +3 -1
- agno/reasoning/gemini.py +73 -0
- agno/reasoning/groq.py +2 -2
- agno/reasoning/ollama.py +2 -2
- agno/reasoning/openai.py +7 -2
- agno/reasoning/vertexai.py +76 -0
- agno/run/__init__.py +6 -0
- agno/run/agent.py +266 -112
- agno/run/base.py +53 -24
- agno/run/team.py +252 -111
- agno/run/workflow.py +156 -45
- agno/session/agent.py +105 -89
- agno/session/summary.py +65 -25
- agno/session/team.py +176 -96
- agno/session/workflow.py +406 -40
- agno/team/team.py +3854 -1692
- agno/tools/brightdata.py +3 -3
- agno/tools/cartesia.py +3 -5
- agno/tools/dalle.py +9 -8
- agno/tools/decorator.py +4 -2
- agno/tools/desi_vocal.py +2 -2
- agno/tools/duckduckgo.py +15 -11
- agno/tools/e2b.py +20 -13
- agno/tools/eleven_labs.py +26 -28
- agno/tools/exa.py +21 -16
- agno/tools/fal.py +4 -4
- agno/tools/file.py +153 -23
- agno/tools/file_generation.py +350 -0
- agno/tools/firecrawl.py +4 -4
- agno/tools/function.py +257 -37
- agno/tools/giphy.py +2 -2
- agno/tools/gmail.py +238 -14
- agno/tools/google_drive.py +270 -0
- agno/tools/googlecalendar.py +36 -8
- agno/tools/googlesheets.py +20 -5
- agno/tools/jira.py +20 -0
- agno/tools/knowledge.py +3 -3
- agno/tools/lumalab.py +3 -3
- agno/tools/mcp/__init__.py +10 -0
- agno/tools/mcp/mcp.py +331 -0
- agno/tools/mcp/multi_mcp.py +347 -0
- agno/tools/mcp/params.py +24 -0
- agno/tools/mcp_toolbox.py +284 -0
- agno/tools/mem0.py +11 -17
- agno/tools/memori.py +1 -53
- agno/tools/memory.py +419 -0
- agno/tools/models/azure_openai.py +2 -2
- agno/tools/models/gemini.py +3 -3
- agno/tools/models/groq.py +3 -5
- agno/tools/models/nebius.py +7 -7
- agno/tools/models_labs.py +25 -15
- agno/tools/notion.py +204 -0
- agno/tools/openai.py +4 -9
- agno/tools/opencv.py +3 -3
- agno/tools/parallel.py +314 -0
- agno/tools/replicate.py +7 -7
- agno/tools/scrapegraph.py +58 -31
- agno/tools/searxng.py +2 -2
- agno/tools/serper.py +2 -2
- agno/tools/slack.py +18 -3
- agno/tools/spider.py +2 -2
- agno/tools/tavily.py +146 -0
- agno/tools/whatsapp.py +1 -1
- agno/tools/workflow.py +278 -0
- agno/tools/yfinance.py +12 -11
- agno/utils/agent.py +820 -0
- agno/utils/audio.py +27 -0
- agno/utils/common.py +90 -1
- agno/utils/events.py +222 -7
- agno/utils/gemini.py +181 -23
- agno/utils/hooks.py +57 -0
- agno/utils/http.py +111 -0
- agno/utils/knowledge.py +12 -5
- agno/utils/log.py +1 -0
- agno/utils/mcp.py +95 -5
- agno/utils/media.py +188 -10
- agno/utils/merge_dict.py +22 -1
- agno/utils/message.py +60 -0
- agno/utils/models/claude.py +40 -11
- agno/utils/models/cohere.py +1 -1
- agno/utils/models/watsonx.py +1 -1
- agno/utils/openai.py +1 -1
- agno/utils/print_response/agent.py +105 -21
- agno/utils/print_response/team.py +103 -38
- agno/utils/print_response/workflow.py +251 -34
- agno/utils/reasoning.py +22 -1
- agno/utils/serialize.py +32 -0
- agno/utils/streamlit.py +16 -10
- agno/utils/string.py +41 -0
- agno/utils/team.py +98 -9
- agno/utils/tools.py +1 -1
- agno/vectordb/base.py +23 -4
- agno/vectordb/cassandra/cassandra.py +65 -9
- agno/vectordb/chroma/chromadb.py +182 -38
- agno/vectordb/clickhouse/clickhousedb.py +64 -11
- agno/vectordb/couchbase/couchbase.py +105 -10
- agno/vectordb/lancedb/lance_db.py +183 -135
- agno/vectordb/langchaindb/langchaindb.py +25 -7
- agno/vectordb/lightrag/lightrag.py +17 -3
- agno/vectordb/llamaindex/__init__.py +3 -0
- agno/vectordb/llamaindex/llamaindexdb.py +46 -7
- agno/vectordb/milvus/milvus.py +126 -9
- agno/vectordb/mongodb/__init__.py +7 -1
- agno/vectordb/mongodb/mongodb.py +112 -7
- agno/vectordb/pgvector/pgvector.py +142 -21
- agno/vectordb/pineconedb/pineconedb.py +80 -8
- agno/vectordb/qdrant/qdrant.py +125 -39
- agno/vectordb/redis/__init__.py +9 -0
- agno/vectordb/redis/redisdb.py +694 -0
- agno/vectordb/singlestore/singlestore.py +111 -25
- agno/vectordb/surrealdb/surrealdb.py +31 -5
- agno/vectordb/upstashdb/upstashdb.py +76 -8
- agno/vectordb/weaviate/weaviate.py +86 -15
- agno/workflow/__init__.py +2 -0
- agno/workflow/agent.py +299 -0
- agno/workflow/condition.py +112 -18
- agno/workflow/loop.py +69 -10
- agno/workflow/parallel.py +266 -118
- agno/workflow/router.py +110 -17
- agno/workflow/step.py +645 -136
- agno/workflow/steps.py +65 -6
- agno/workflow/types.py +71 -33
- agno/workflow/workflow.py +2113 -300
- agno-2.3.0.dist-info/METADATA +618 -0
- agno-2.3.0.dist-info/RECORD +577 -0
- agno-2.3.0.dist-info/licenses/LICENSE +201 -0
- agno/knowledge/reader/url_reader.py +0 -128
- agno/tools/googlesearch.py +0 -98
- agno/tools/mcp.py +0 -610
- agno/utils/models/aws_claude.py +0 -170
- agno-2.0.0rc2.dist-info/METADATA +0 -355
- agno-2.0.0rc2.dist-info/RECORD +0 -515
- agno-2.0.0rc2.dist-info/licenses/LICENSE +0 -375
- {agno-2.0.0rc2.dist-info → agno-2.3.0.dist-info}/WHEEL +0 -0
- {agno-2.0.0rc2.dist-info → agno-2.3.0.dist-info}/top_level.txt +0 -0
agno/db/gcs_json/gcs_json_db.py
CHANGED
|
@@ -8,14 +8,18 @@ from agno.db.base import BaseDb, SessionType
|
|
|
8
8
|
from agno.db.gcs_json.utils import (
|
|
9
9
|
apply_sorting,
|
|
10
10
|
calculate_date_metrics,
|
|
11
|
+
deserialize_cultural_knowledge_from_db,
|
|
11
12
|
fetch_all_sessions_data,
|
|
12
13
|
get_dates_to_calculate_metrics_for,
|
|
14
|
+
serialize_cultural_knowledge_for_db,
|
|
13
15
|
)
|
|
16
|
+
from agno.db.schemas.culture import CulturalKnowledge
|
|
14
17
|
from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
|
|
15
18
|
from agno.db.schemas.knowledge import KnowledgeRow
|
|
16
19
|
from agno.db.schemas.memory import UserMemory
|
|
17
20
|
from agno.session import AgentSession, Session, TeamSession, WorkflowSession
|
|
18
21
|
from agno.utils.log import log_debug, log_error, log_info, log_warning
|
|
22
|
+
from agno.utils.string import generate_id
|
|
19
23
|
|
|
20
24
|
try:
|
|
21
25
|
from google.cloud import storage as gcs # type: ignore
|
|
@@ -33,8 +37,10 @@ class GcsJsonDb(BaseDb):
|
|
|
33
37
|
metrics_table: Optional[str] = None,
|
|
34
38
|
eval_table: Optional[str] = None,
|
|
35
39
|
knowledge_table: Optional[str] = None,
|
|
40
|
+
culture_table: Optional[str] = None,
|
|
36
41
|
project: Optional[str] = None,
|
|
37
42
|
credentials: Optional[Any] = None,
|
|
43
|
+
id: Optional[str] = None,
|
|
38
44
|
):
|
|
39
45
|
"""
|
|
40
46
|
Interface for interacting with JSON files stored in Google Cloud Storage as database.
|
|
@@ -47,16 +53,25 @@ class GcsJsonDb(BaseDb):
|
|
|
47
53
|
metrics_table (Optional[str]): Name of the JSON file to store metrics.
|
|
48
54
|
eval_table (Optional[str]): Name of the JSON file to store evaluation runs.
|
|
49
55
|
knowledge_table (Optional[str]): Name of the JSON file to store knowledge content.
|
|
56
|
+
culture_table (Optional[str]): Name of the JSON file to store cultural knowledge.
|
|
50
57
|
project (Optional[str]): GCP project ID. If None, uses default project.
|
|
51
58
|
location (Optional[str]): GCS bucket location. If None, uses default location.
|
|
52
59
|
credentials (Optional[Any]): GCP credentials. If None, uses default credentials.
|
|
60
|
+
id (Optional[str]): ID of the database.
|
|
53
61
|
"""
|
|
62
|
+
if id is None:
|
|
63
|
+
prefix_suffix = prefix or "agno/"
|
|
64
|
+
seed = f"{bucket_name}_{project}#{prefix_suffix}"
|
|
65
|
+
id = generate_id(seed)
|
|
66
|
+
|
|
54
67
|
super().__init__(
|
|
68
|
+
id=id,
|
|
55
69
|
session_table=session_table,
|
|
56
70
|
memory_table=memory_table,
|
|
57
71
|
metrics_table=metrics_table,
|
|
58
72
|
eval_table=eval_table,
|
|
59
73
|
knowledge_table=knowledge_table,
|
|
74
|
+
culture_table=culture_table,
|
|
60
75
|
)
|
|
61
76
|
|
|
62
77
|
self.bucket_name = bucket_name
|
|
@@ -68,6 +83,10 @@ class GcsJsonDb(BaseDb):
|
|
|
68
83
|
self.client = gcs.Client(project=project, credentials=credentials)
|
|
69
84
|
self.bucket = self.client.bucket(self.bucket_name)
|
|
70
85
|
|
|
86
|
+
def table_exists(self, table_name: str) -> bool:
|
|
87
|
+
"""JSON implementation, always returns True."""
|
|
88
|
+
return True
|
|
89
|
+
|
|
71
90
|
def _get_blob_name(self, filename: str) -> str:
|
|
72
91
|
"""Get the full blob name including prefix for a given filename."""
|
|
73
92
|
return f"{self.prefix}{filename}.json"
|
|
@@ -123,6 +142,14 @@ class GcsJsonDb(BaseDb):
|
|
|
123
142
|
log_error(f"Error writing to the {blob_name} JSON file in GCS: {e}")
|
|
124
143
|
return
|
|
125
144
|
|
|
145
|
+
def get_latest_schema_version(self):
|
|
146
|
+
"""Get the latest version of the database schema."""
|
|
147
|
+
pass
|
|
148
|
+
|
|
149
|
+
def upsert_schema_version(self, version: str) -> None:
|
|
150
|
+
"""Upsert the schema version into the database."""
|
|
151
|
+
pass
|
|
152
|
+
|
|
126
153
|
# -- Session methods --
|
|
127
154
|
|
|
128
155
|
def delete_session(self, session_id: str) -> bool:
|
|
@@ -153,7 +180,7 @@ class GcsJsonDb(BaseDb):
|
|
|
153
180
|
|
|
154
181
|
except Exception as e:
|
|
155
182
|
log_warning(f"Error deleting session: {e}")
|
|
156
|
-
|
|
183
|
+
raise e
|
|
157
184
|
|
|
158
185
|
def delete_sessions(self, session_ids: List[str]) -> None:
|
|
159
186
|
"""Delete multiple sessions from the GCS JSON file.
|
|
@@ -172,11 +199,12 @@ class GcsJsonDb(BaseDb):
|
|
|
172
199
|
|
|
173
200
|
except Exception as e:
|
|
174
201
|
log_warning(f"Error deleting sessions: {e}")
|
|
202
|
+
raise e
|
|
175
203
|
|
|
176
204
|
def get_session(
|
|
177
205
|
self,
|
|
178
206
|
session_id: str,
|
|
179
|
-
session_type:
|
|
207
|
+
session_type: SessionType,
|
|
180
208
|
user_id: Optional[str] = None,
|
|
181
209
|
deserialize: Optional[bool] = True,
|
|
182
210
|
) -> Optional[Union[AgentSession, TeamSession, WorkflowSession, Dict[str, Any]]]:
|
|
@@ -184,7 +212,7 @@ class GcsJsonDb(BaseDb):
|
|
|
184
212
|
|
|
185
213
|
Args:
|
|
186
214
|
session_id (str): The ID of the session to read.
|
|
187
|
-
session_type (
|
|
215
|
+
session_type (SessionType): The type of the session to read.
|
|
188
216
|
user_id (Optional[str]): The ID of the user to read the session for.
|
|
189
217
|
deserialize (Optional[bool]): Whether to deserialize the session.
|
|
190
218
|
|
|
@@ -204,10 +232,6 @@ class GcsJsonDb(BaseDb):
|
|
|
204
232
|
if user_id is not None and session_data.get("user_id") != user_id:
|
|
205
233
|
continue
|
|
206
234
|
|
|
207
|
-
session_type_value = session_type.value if isinstance(session_type, SessionType) else session_type
|
|
208
|
-
if session_data.get("session_type") != session_type_value:
|
|
209
|
-
continue
|
|
210
|
-
|
|
211
235
|
if not deserialize:
|
|
212
236
|
return session_data
|
|
213
237
|
|
|
@@ -217,12 +241,14 @@ class GcsJsonDb(BaseDb):
|
|
|
217
241
|
return TeamSession.from_dict(session_data)
|
|
218
242
|
elif session_type == SessionType.WORKFLOW:
|
|
219
243
|
return WorkflowSession.from_dict(session_data)
|
|
244
|
+
else:
|
|
245
|
+
raise ValueError(f"Invalid session type: {session_type}")
|
|
220
246
|
|
|
221
247
|
return None
|
|
222
248
|
|
|
223
249
|
except Exception as e:
|
|
224
250
|
log_warning(f"Exception reading from session file: {e}")
|
|
225
|
-
|
|
251
|
+
raise e
|
|
226
252
|
|
|
227
253
|
def get_sessions(
|
|
228
254
|
self,
|
|
@@ -317,7 +343,7 @@ class GcsJsonDb(BaseDb):
|
|
|
317
343
|
|
|
318
344
|
except Exception as e:
|
|
319
345
|
log_warning(f"Exception reading from session file: {e}")
|
|
320
|
-
|
|
346
|
+
raise e
|
|
321
347
|
|
|
322
348
|
def rename_session(
|
|
323
349
|
self, session_id: str, session_type: SessionType, session_name: str, deserialize: Optional[bool] = True
|
|
@@ -352,7 +378,7 @@ class GcsJsonDb(BaseDb):
|
|
|
352
378
|
return None
|
|
353
379
|
except Exception as e:
|
|
354
380
|
log_warning(f"Exception renaming session: {e}")
|
|
355
|
-
|
|
381
|
+
raise e
|
|
356
382
|
|
|
357
383
|
def upsert_session(
|
|
358
384
|
self, session: Session, deserialize: Optional[bool] = True
|
|
@@ -397,7 +423,44 @@ class GcsJsonDb(BaseDb):
|
|
|
397
423
|
|
|
398
424
|
except Exception as e:
|
|
399
425
|
log_warning(f"Exception upserting session: {e}")
|
|
400
|
-
|
|
426
|
+
raise e
|
|
427
|
+
|
|
428
|
+
def upsert_sessions(
|
|
429
|
+
self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
|
|
430
|
+
) -> List[Union[Session, Dict[str, Any]]]:
|
|
431
|
+
"""
|
|
432
|
+
Bulk upsert multiple sessions for improved performance on large datasets.
|
|
433
|
+
|
|
434
|
+
Args:
|
|
435
|
+
sessions (List[Session]): List of sessions to upsert.
|
|
436
|
+
deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
|
|
437
|
+
|
|
438
|
+
Returns:
|
|
439
|
+
List[Union[Session, Dict[str, Any]]]: List of upserted sessions.
|
|
440
|
+
|
|
441
|
+
Raises:
|
|
442
|
+
Exception: If an error occurs during bulk upsert.
|
|
443
|
+
"""
|
|
444
|
+
if not sessions:
|
|
445
|
+
return []
|
|
446
|
+
|
|
447
|
+
try:
|
|
448
|
+
log_info(
|
|
449
|
+
f"GcsJsonDb doesn't support efficient bulk operations, falling back to individual upserts for {len(sessions)} sessions"
|
|
450
|
+
)
|
|
451
|
+
|
|
452
|
+
# Fall back to individual upserts
|
|
453
|
+
results = []
|
|
454
|
+
for session in sessions:
|
|
455
|
+
if session is not None:
|
|
456
|
+
result = self.upsert_session(session, deserialize=deserialize)
|
|
457
|
+
if result is not None:
|
|
458
|
+
results.append(result)
|
|
459
|
+
return results
|
|
460
|
+
|
|
461
|
+
except Exception as e:
|
|
462
|
+
log_error(f"Exception during bulk session upsert: {e}")
|
|
463
|
+
return []
|
|
401
464
|
|
|
402
465
|
def _matches_session_key(self, existing_session: Dict[str, Any], session: Session) -> bool:
|
|
403
466
|
"""Check if existing session matches the key for the session type."""
|
|
@@ -410,12 +473,23 @@ class GcsJsonDb(BaseDb):
|
|
|
410
473
|
return False
|
|
411
474
|
|
|
412
475
|
# -- Memory methods --
|
|
413
|
-
def delete_user_memory(self, memory_id: str) -> None:
|
|
414
|
-
"""Delete a user memory from the GCS JSON file.
|
|
476
|
+
def delete_user_memory(self, memory_id: str, user_id: Optional[str] = None) -> None:
|
|
477
|
+
"""Delete a user memory from the GCS JSON file.
|
|
478
|
+
|
|
479
|
+
Args:
|
|
480
|
+
memory_id (str): The ID of the memory to delete.
|
|
481
|
+
user_id (Optional[str]): The ID of the user. If provided, verifies ownership before deletion.
|
|
482
|
+
"""
|
|
415
483
|
try:
|
|
416
484
|
memories = self._read_json_file(self.memory_table_name)
|
|
417
485
|
original_count = len(memories)
|
|
418
|
-
|
|
486
|
+
|
|
487
|
+
# Filter out the memory, with optional user_id verification
|
|
488
|
+
memories = [
|
|
489
|
+
m
|
|
490
|
+
for m in memories
|
|
491
|
+
if not (m.get("memory_id") == memory_id and (user_id is None or m.get("user_id") == user_id))
|
|
492
|
+
]
|
|
419
493
|
|
|
420
494
|
if len(memories) < original_count:
|
|
421
495
|
self._write_json_file(self.memory_table_name, memories)
|
|
@@ -426,19 +500,37 @@ class GcsJsonDb(BaseDb):
|
|
|
426
500
|
|
|
427
501
|
except Exception as e:
|
|
428
502
|
log_warning(f"Error deleting user memory: {e}")
|
|
503
|
+
raise e
|
|
504
|
+
|
|
505
|
+
def delete_user_memories(self, memory_ids: List[str], user_id: Optional[str] = None) -> None:
|
|
506
|
+
"""Delete multiple user memories from the GCS JSON file.
|
|
429
507
|
|
|
430
|
-
|
|
431
|
-
|
|
508
|
+
Args:
|
|
509
|
+
memory_ids (List[str]): The IDs of the memories to delete.
|
|
510
|
+
user_id (Optional[str]): The ID of the user. If provided, verifies ownership before deletion.
|
|
511
|
+
"""
|
|
432
512
|
try:
|
|
433
513
|
memories = self._read_json_file(self.memory_table_name)
|
|
434
|
-
|
|
514
|
+
|
|
515
|
+
# Filter out memories, with optional user_id verification
|
|
516
|
+
memories = [
|
|
517
|
+
m
|
|
518
|
+
for m in memories
|
|
519
|
+
if not (m.get("memory_id") in memory_ids and (user_id is None or m.get("user_id") == user_id))
|
|
520
|
+
]
|
|
521
|
+
|
|
435
522
|
self._write_json_file(self.memory_table_name, memories)
|
|
436
523
|
log_debug(f"Successfully deleted user memories with ids: {memory_ids}")
|
|
437
524
|
except Exception as e:
|
|
438
525
|
log_warning(f"Error deleting user memories: {e}")
|
|
526
|
+
raise e
|
|
439
527
|
|
|
440
528
|
def get_all_memory_topics(self) -> List[str]:
|
|
441
|
-
"""Get all memory topics from the GCS JSON file.
|
|
529
|
+
"""Get all memory topics from the GCS JSON file.
|
|
530
|
+
|
|
531
|
+
Returns:
|
|
532
|
+
List[str]: List of unique memory topics.
|
|
533
|
+
"""
|
|
442
534
|
try:
|
|
443
535
|
memories = self._read_json_file(self.memory_table_name)
|
|
444
536
|
topics = set()
|
|
@@ -450,17 +542,30 @@ class GcsJsonDb(BaseDb):
|
|
|
450
542
|
|
|
451
543
|
except Exception as e:
|
|
452
544
|
log_warning(f"Exception reading from memory file: {e}")
|
|
453
|
-
|
|
545
|
+
raise e
|
|
454
546
|
|
|
455
547
|
def get_user_memory(
|
|
456
|
-
self, memory_id: str, deserialize: Optional[bool] = True
|
|
548
|
+
self, memory_id: str, deserialize: Optional[bool] = True, user_id: Optional[str] = None
|
|
457
549
|
) -> Optional[Union[UserMemory, Dict[str, Any]]]:
|
|
458
|
-
"""Get a memory from the GCS JSON file.
|
|
550
|
+
"""Get a memory from the GCS JSON file.
|
|
551
|
+
|
|
552
|
+
Args:
|
|
553
|
+
memory_id (str): The ID of the memory to retrieve.
|
|
554
|
+
deserialize (Optional[bool]): Whether to deserialize to UserMemory object. Defaults to True.
|
|
555
|
+
user_id (Optional[str]): The ID of the user. If provided, verifies ownership before returning.
|
|
556
|
+
|
|
557
|
+
Returns:
|
|
558
|
+
Optional[Union[UserMemory, Dict[str, Any]]]: The memory if found and ownership matches, None otherwise.
|
|
559
|
+
"""
|
|
459
560
|
try:
|
|
460
561
|
memories = self._read_json_file(self.memory_table_name)
|
|
461
562
|
|
|
462
563
|
for memory_data in memories:
|
|
463
564
|
if memory_data.get("memory_id") == memory_id:
|
|
565
|
+
# Verify user ownership if user_id is provided
|
|
566
|
+
if user_id is not None and memory_data.get("user_id") != user_id:
|
|
567
|
+
continue
|
|
568
|
+
|
|
464
569
|
if not deserialize:
|
|
465
570
|
return memory_data
|
|
466
571
|
|
|
@@ -469,7 +574,7 @@ class GcsJsonDb(BaseDb):
|
|
|
469
574
|
return None
|
|
470
575
|
except Exception as e:
|
|
471
576
|
log_warning(f"Exception reading from memory file: {e}")
|
|
472
|
-
|
|
577
|
+
raise e
|
|
473
578
|
|
|
474
579
|
def get_user_memories(
|
|
475
580
|
self,
|
|
@@ -527,25 +632,38 @@ class GcsJsonDb(BaseDb):
|
|
|
527
632
|
|
|
528
633
|
except Exception as e:
|
|
529
634
|
log_warning(f"Exception reading from memory file: {e}")
|
|
530
|
-
|
|
635
|
+
raise e
|
|
531
636
|
|
|
532
637
|
def get_user_memory_stats(
|
|
533
638
|
self, limit: Optional[int] = None, page: Optional[int] = None
|
|
534
639
|
) -> Tuple[List[Dict[str, Any]], int]:
|
|
535
|
-
"""Get user memory statistics.
|
|
640
|
+
"""Get user memory statistics.
|
|
641
|
+
|
|
642
|
+
Args:
|
|
643
|
+
limit (Optional[int]): Maximum number of results to return.
|
|
644
|
+
page (Optional[int]): Page number for pagination.
|
|
645
|
+
|
|
646
|
+
Returns:
|
|
647
|
+
Tuple[List[Dict[str, Any]], int]: List of user memory statistics and total count.
|
|
648
|
+
"""
|
|
536
649
|
try:
|
|
537
650
|
memories = self._read_json_file(self.memory_table_name)
|
|
538
651
|
user_stats = {}
|
|
539
652
|
|
|
540
653
|
for memory in memories:
|
|
541
|
-
|
|
542
|
-
|
|
543
|
-
|
|
544
|
-
|
|
545
|
-
|
|
654
|
+
memory_user_id = memory.get("user_id")
|
|
655
|
+
|
|
656
|
+
if memory_user_id:
|
|
657
|
+
if memory_user_id not in user_stats:
|
|
658
|
+
user_stats[memory_user_id] = {
|
|
659
|
+
"user_id": memory_user_id,
|
|
660
|
+
"total_memories": 0,
|
|
661
|
+
"last_memory_updated_at": 0,
|
|
662
|
+
}
|
|
663
|
+
user_stats[memory_user_id]["total_memories"] += 1
|
|
546
664
|
updated_at = memory.get("updated_at", 0)
|
|
547
|
-
if updated_at > user_stats[
|
|
548
|
-
user_stats[
|
|
665
|
+
if updated_at > user_stats[memory_user_id]["last_memory_updated_at"]:
|
|
666
|
+
user_stats[memory_user_id]["last_memory_updated_at"] = updated_at
|
|
549
667
|
|
|
550
668
|
stats_list = list(user_stats.values())
|
|
551
669
|
stats_list.sort(key=lambda x: x["last_memory_updated_at"], reverse=True)
|
|
@@ -563,7 +681,7 @@ class GcsJsonDb(BaseDb):
|
|
|
563
681
|
|
|
564
682
|
except Exception as e:
|
|
565
683
|
log_warning(f"Exception getting user memory stats: {e}")
|
|
566
|
-
|
|
684
|
+
raise e
|
|
567
685
|
|
|
568
686
|
def upsert_user_memory(
|
|
569
687
|
self, memory: UserMemory, deserialize: Optional[bool] = True
|
|
@@ -597,7 +715,43 @@ class GcsJsonDb(BaseDb):
|
|
|
597
715
|
|
|
598
716
|
except Exception as e:
|
|
599
717
|
log_error(f"Exception upserting user memory: {e}")
|
|
600
|
-
|
|
718
|
+
raise e
|
|
719
|
+
|
|
720
|
+
def upsert_memories(
|
|
721
|
+
self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
|
|
722
|
+
) -> List[Union[UserMemory, Dict[str, Any]]]:
|
|
723
|
+
"""
|
|
724
|
+
Bulk upsert multiple user memories for improved performance on large datasets.
|
|
725
|
+
|
|
726
|
+
Args:
|
|
727
|
+
memories (List[UserMemory]): List of memories to upsert.
|
|
728
|
+
deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.
|
|
729
|
+
|
|
730
|
+
Returns:
|
|
731
|
+
List[Union[UserMemory, Dict[str, Any]]]: List of upserted memories.
|
|
732
|
+
|
|
733
|
+
Raises:
|
|
734
|
+
Exception: If an error occurs during bulk upsert.
|
|
735
|
+
"""
|
|
736
|
+
if not memories:
|
|
737
|
+
return []
|
|
738
|
+
|
|
739
|
+
try:
|
|
740
|
+
log_info(
|
|
741
|
+
f"GcsJsonDb doesn't support efficient bulk operations, falling back to individual upserts for {len(memories)} memories"
|
|
742
|
+
)
|
|
743
|
+
# Fall back to individual upserts
|
|
744
|
+
results = []
|
|
745
|
+
for memory in memories:
|
|
746
|
+
if memory is not None:
|
|
747
|
+
result = self.upsert_user_memory(memory, deserialize=deserialize)
|
|
748
|
+
if result is not None:
|
|
749
|
+
results.append(result)
|
|
750
|
+
return results
|
|
751
|
+
|
|
752
|
+
except Exception as e:
|
|
753
|
+
log_error(f"Exception during bulk memory upsert: {e}")
|
|
754
|
+
return []
|
|
601
755
|
|
|
602
756
|
def clear_memories(self) -> None:
|
|
603
757
|
"""Delete all memories from the database.
|
|
@@ -611,6 +765,7 @@ class GcsJsonDb(BaseDb):
|
|
|
611
765
|
|
|
612
766
|
except Exception as e:
|
|
613
767
|
log_warning(f"Exception deleting all memories: {e}")
|
|
768
|
+
raise e
|
|
614
769
|
|
|
615
770
|
# -- Metrics methods --
|
|
616
771
|
def calculate_metrics(self) -> Optional[list[dict]]:
|
|
@@ -677,7 +832,7 @@ class GcsJsonDb(BaseDb):
|
|
|
677
832
|
|
|
678
833
|
except Exception as e:
|
|
679
834
|
log_warning(f"Exception refreshing metrics: {e}")
|
|
680
|
-
|
|
835
|
+
raise e
|
|
681
836
|
|
|
682
837
|
def _get_metrics_calculation_starting_date(self, metrics: List[Dict[str, Any]]) -> Optional[date]:
|
|
683
838
|
"""Get the first date for which metrics calculation is needed."""
|
|
@@ -732,7 +887,7 @@ class GcsJsonDb(BaseDb):
|
|
|
732
887
|
|
|
733
888
|
except Exception as e:
|
|
734
889
|
log_warning(f"Exception reading sessions for metrics: {e}")
|
|
735
|
-
|
|
890
|
+
raise e
|
|
736
891
|
|
|
737
892
|
def get_metrics(
|
|
738
893
|
self,
|
|
@@ -764,7 +919,7 @@ class GcsJsonDb(BaseDb):
|
|
|
764
919
|
|
|
765
920
|
except Exception as e:
|
|
766
921
|
log_warning(f"Exception getting metrics: {e}")
|
|
767
|
-
|
|
922
|
+
raise e
|
|
768
923
|
|
|
769
924
|
# -- Knowledge methods --
|
|
770
925
|
def delete_knowledge_content(self, id: str):
|
|
@@ -775,6 +930,7 @@ class GcsJsonDb(BaseDb):
|
|
|
775
930
|
self._write_json_file(self.knowledge_table_name, knowledge_items)
|
|
776
931
|
except Exception as e:
|
|
777
932
|
log_warning(f"Error deleting knowledge content: {e}")
|
|
933
|
+
raise e
|
|
778
934
|
|
|
779
935
|
def get_knowledge_content(self, id: str) -> Optional[KnowledgeRow]:
|
|
780
936
|
"""Get knowledge content by ID."""
|
|
@@ -788,7 +944,7 @@ class GcsJsonDb(BaseDb):
|
|
|
788
944
|
return None
|
|
789
945
|
except Exception as e:
|
|
790
946
|
log_warning(f"Error getting knowledge content: {e}")
|
|
791
|
-
|
|
947
|
+
raise e
|
|
792
948
|
|
|
793
949
|
def get_knowledge_contents(
|
|
794
950
|
self,
|
|
@@ -817,7 +973,7 @@ class GcsJsonDb(BaseDb):
|
|
|
817
973
|
|
|
818
974
|
except Exception as e:
|
|
819
975
|
log_warning(f"Error getting knowledge contents: {e}")
|
|
820
|
-
|
|
976
|
+
raise e
|
|
821
977
|
|
|
822
978
|
def upsert_knowledge_content(self, knowledge_row: KnowledgeRow):
|
|
823
979
|
"""Upsert knowledge content in the GCS JSON file."""
|
|
@@ -841,7 +997,7 @@ class GcsJsonDb(BaseDb):
|
|
|
841
997
|
|
|
842
998
|
except Exception as e:
|
|
843
999
|
log_warning(f"Error upserting knowledge row: {e}")
|
|
844
|
-
|
|
1000
|
+
raise e
|
|
845
1001
|
|
|
846
1002
|
# -- Eval methods --
|
|
847
1003
|
def create_eval_run(self, eval_run: EvalRunRecord) -> Optional[EvalRunRecord]:
|
|
@@ -860,7 +1016,7 @@ class GcsJsonDb(BaseDb):
|
|
|
860
1016
|
return eval_run
|
|
861
1017
|
except Exception as e:
|
|
862
1018
|
log_warning(f"Error creating eval run: {e}")
|
|
863
|
-
|
|
1019
|
+
raise e
|
|
864
1020
|
|
|
865
1021
|
def delete_eval_run(self, eval_run_id: str) -> None:
|
|
866
1022
|
"""Delete an eval run from the GCS JSON file."""
|
|
@@ -876,6 +1032,7 @@ class GcsJsonDb(BaseDb):
|
|
|
876
1032
|
log_warning(f"No eval run found with ID: {eval_run_id}")
|
|
877
1033
|
except Exception as e:
|
|
878
1034
|
log_warning(f"Error deleting eval run {eval_run_id}: {e}")
|
|
1035
|
+
raise e
|
|
879
1036
|
|
|
880
1037
|
def delete_eval_runs(self, eval_run_ids: List[str]) -> None:
|
|
881
1038
|
"""Delete multiple eval runs from the GCS JSON file."""
|
|
@@ -892,6 +1049,7 @@ class GcsJsonDb(BaseDb):
|
|
|
892
1049
|
log_warning(f"No eval runs found with IDs: {eval_run_ids}")
|
|
893
1050
|
except Exception as e:
|
|
894
1051
|
log_warning(f"Error deleting eval runs {eval_run_ids}: {e}")
|
|
1052
|
+
raise e
|
|
895
1053
|
|
|
896
1054
|
def get_eval_run(
|
|
897
1055
|
self, eval_run_id: str, deserialize: Optional[bool] = True
|
|
@@ -909,7 +1067,7 @@ class GcsJsonDb(BaseDb):
|
|
|
909
1067
|
return None
|
|
910
1068
|
except Exception as e:
|
|
911
1069
|
log_warning(f"Exception getting eval run {eval_run_id}: {e}")
|
|
912
|
-
|
|
1070
|
+
raise e
|
|
913
1071
|
|
|
914
1072
|
def get_eval_runs(
|
|
915
1073
|
self,
|
|
@@ -975,7 +1133,7 @@ class GcsJsonDb(BaseDb):
|
|
|
975
1133
|
|
|
976
1134
|
except Exception as e:
|
|
977
1135
|
log_warning(f"Exception getting eval runs: {e}")
|
|
978
|
-
|
|
1136
|
+
raise e
|
|
979
1137
|
|
|
980
1138
|
def rename_eval_run(
|
|
981
1139
|
self, eval_run_id: str, name: str, deserialize: Optional[bool] = True
|
|
@@ -998,4 +1156,188 @@ class GcsJsonDb(BaseDb):
|
|
|
998
1156
|
return None
|
|
999
1157
|
except Exception as e:
|
|
1000
1158
|
log_warning(f"Error renaming eval run {eval_run_id}: {e}")
|
|
1159
|
+
raise e
|
|
1160
|
+
|
|
1161
|
+
# -- Cultural Knowledge methods --
|
|
1162
|
+
def clear_cultural_knowledge(self) -> None:
|
|
1163
|
+
"""Delete all cultural knowledge from the database.
|
|
1164
|
+
|
|
1165
|
+
Raises:
|
|
1166
|
+
Exception: If an error occurs during deletion.
|
|
1167
|
+
"""
|
|
1168
|
+
try:
|
|
1169
|
+
self._write_json_file(self.culture_table_name, [])
|
|
1170
|
+
except Exception as e:
|
|
1171
|
+
log_warning(f"Exception deleting all cultural knowledge: {e}")
|
|
1172
|
+
raise e
|
|
1173
|
+
|
|
1174
|
+
def delete_cultural_knowledge(self, id: str) -> None:
|
|
1175
|
+
"""Delete cultural knowledge by ID.
|
|
1176
|
+
|
|
1177
|
+
Args:
|
|
1178
|
+
id (str): The ID of the cultural knowledge to delete.
|
|
1179
|
+
|
|
1180
|
+
Raises:
|
|
1181
|
+
Exception: If an error occurs during deletion.
|
|
1182
|
+
"""
|
|
1183
|
+
try:
|
|
1184
|
+
cultural_knowledge = self._read_json_file(self.culture_table_name)
|
|
1185
|
+
cultural_knowledge = [item for item in cultural_knowledge if item.get("id") != id]
|
|
1186
|
+
self._write_json_file(self.culture_table_name, cultural_knowledge)
|
|
1187
|
+
log_debug(f"Deleted cultural knowledge with ID: {id}")
|
|
1188
|
+
except Exception as e:
|
|
1189
|
+
log_warning(f"Error deleting cultural knowledge: {e}")
|
|
1190
|
+
raise e
|
|
1191
|
+
|
|
1192
|
+
def get_cultural_knowledge(
|
|
1193
|
+
self, id: str, deserialize: Optional[bool] = True
|
|
1194
|
+
) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
|
|
1195
|
+
"""Get cultural knowledge by ID.
|
|
1196
|
+
|
|
1197
|
+
Args:
|
|
1198
|
+
id (str): The ID of the cultural knowledge to retrieve.
|
|
1199
|
+
deserialize (Optional[bool]): Whether to deserialize to CulturalKnowledge object. Defaults to True.
|
|
1200
|
+
|
|
1201
|
+
Returns:
|
|
1202
|
+
Optional[Union[CulturalKnowledge, Dict[str, Any]]]: The cultural knowledge if found, None otherwise.
|
|
1203
|
+
|
|
1204
|
+
Raises:
|
|
1205
|
+
Exception: If an error occurs during retrieval.
|
|
1206
|
+
"""
|
|
1207
|
+
try:
|
|
1208
|
+
cultural_knowledge = self._read_json_file(self.culture_table_name)
|
|
1209
|
+
|
|
1210
|
+
for item in cultural_knowledge:
|
|
1211
|
+
if item.get("id") == id:
|
|
1212
|
+
if not deserialize:
|
|
1213
|
+
return item
|
|
1214
|
+
return deserialize_cultural_knowledge_from_db(item)
|
|
1215
|
+
|
|
1001
1216
|
return None
|
|
1217
|
+
except Exception as e:
|
|
1218
|
+
log_warning(f"Error getting cultural knowledge: {e}")
|
|
1219
|
+
raise e
|
|
1220
|
+
|
|
1221
|
+
def get_all_cultural_knowledge(
|
|
1222
|
+
self,
|
|
1223
|
+
agent_id: Optional[str] = None,
|
|
1224
|
+
team_id: Optional[str] = None,
|
|
1225
|
+
name: Optional[str] = None,
|
|
1226
|
+
limit: Optional[int] = None,
|
|
1227
|
+
page: Optional[int] = None,
|
|
1228
|
+
sort_by: Optional[str] = None,
|
|
1229
|
+
sort_order: Optional[str] = None,
|
|
1230
|
+
deserialize: Optional[bool] = True,
|
|
1231
|
+
) -> Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
|
|
1232
|
+
"""Get all cultural knowledge with filtering and pagination.
|
|
1233
|
+
|
|
1234
|
+
Args:
|
|
1235
|
+
agent_id (Optional[str]): Filter by agent ID.
|
|
1236
|
+
team_id (Optional[str]): Filter by team ID.
|
|
1237
|
+
name (Optional[str]): Filter by name (case-insensitive partial match).
|
|
1238
|
+
limit (Optional[int]): Maximum number of results to return.
|
|
1239
|
+
page (Optional[int]): Page number for pagination.
|
|
1240
|
+
sort_by (Optional[str]): Field to sort by.
|
|
1241
|
+
sort_order (Optional[str]): Sort order ('asc' or 'desc').
|
|
1242
|
+
deserialize (Optional[bool]): Whether to deserialize to CulturalKnowledge objects. Defaults to True.
|
|
1243
|
+
|
|
1244
|
+
Returns:
|
|
1245
|
+
Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
|
|
1246
|
+
- When deserialize=True: List of CulturalKnowledge objects
|
|
1247
|
+
- When deserialize=False: Tuple with list of dictionaries and total count
|
|
1248
|
+
|
|
1249
|
+
Raises:
|
|
1250
|
+
Exception: If an error occurs during retrieval.
|
|
1251
|
+
"""
|
|
1252
|
+
try:
|
|
1253
|
+
cultural_knowledge = self._read_json_file(self.culture_table_name)
|
|
1254
|
+
|
|
1255
|
+
# Apply filters
|
|
1256
|
+
filtered_items = []
|
|
1257
|
+
for item in cultural_knowledge:
|
|
1258
|
+
if agent_id is not None and item.get("agent_id") != agent_id:
|
|
1259
|
+
continue
|
|
1260
|
+
if team_id is not None and item.get("team_id") != team_id:
|
|
1261
|
+
continue
|
|
1262
|
+
if name is not None and name.lower() not in item.get("name", "").lower():
|
|
1263
|
+
continue
|
|
1264
|
+
|
|
1265
|
+
filtered_items.append(item)
|
|
1266
|
+
|
|
1267
|
+
total_count = len(filtered_items)
|
|
1268
|
+
|
|
1269
|
+
# Apply sorting
|
|
1270
|
+
filtered_items = apply_sorting(filtered_items, sort_by, sort_order)
|
|
1271
|
+
|
|
1272
|
+
# Apply pagination
|
|
1273
|
+
if limit is not None:
|
|
1274
|
+
start_idx = 0
|
|
1275
|
+
if page is not None:
|
|
1276
|
+
start_idx = (page - 1) * limit
|
|
1277
|
+
filtered_items = filtered_items[start_idx : start_idx + limit]
|
|
1278
|
+
|
|
1279
|
+
if not deserialize:
|
|
1280
|
+
return filtered_items, total_count
|
|
1281
|
+
|
|
1282
|
+
return [deserialize_cultural_knowledge_from_db(item) for item in filtered_items]
|
|
1283
|
+
|
|
1284
|
+
except Exception as e:
|
|
1285
|
+
log_warning(f"Error getting all cultural knowledge: {e}")
|
|
1286
|
+
raise e
|
|
1287
|
+
|
|
1288
|
+
def upsert_cultural_knowledge(
|
|
1289
|
+
self, cultural_knowledge: CulturalKnowledge, deserialize: Optional[bool] = True
|
|
1290
|
+
) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
|
|
1291
|
+
"""Upsert cultural knowledge in the GCS JSON file.
|
|
1292
|
+
|
|
1293
|
+
Args:
|
|
1294
|
+
cultural_knowledge (CulturalKnowledge): The cultural knowledge to upsert.
|
|
1295
|
+
deserialize (Optional[bool]): Whether to deserialize the result. Defaults to True.
|
|
1296
|
+
|
|
1297
|
+
Returns:
|
|
1298
|
+
Optional[Union[CulturalKnowledge, Dict[str, Any]]]: The upserted cultural knowledge.
|
|
1299
|
+
|
|
1300
|
+
Raises:
|
|
1301
|
+
Exception: If an error occurs during upsert.
|
|
1302
|
+
"""
|
|
1303
|
+
try:
|
|
1304
|
+
cultural_knowledge_list = self._read_json_file(self.culture_table_name, create_table_if_not_found=True)
|
|
1305
|
+
|
|
1306
|
+
# Serialize content, categories, and notes into a dict for DB storage
|
|
1307
|
+
content_dict = serialize_cultural_knowledge_for_db(cultural_knowledge)
|
|
1308
|
+
|
|
1309
|
+
# Create the item dict with serialized content
|
|
1310
|
+
cultural_knowledge_dict = {
|
|
1311
|
+
"id": cultural_knowledge.id,
|
|
1312
|
+
"name": cultural_knowledge.name,
|
|
1313
|
+
"summary": cultural_knowledge.summary,
|
|
1314
|
+
"content": content_dict if content_dict else None,
|
|
1315
|
+
"metadata": cultural_knowledge.metadata,
|
|
1316
|
+
"input": cultural_knowledge.input,
|
|
1317
|
+
"created_at": cultural_knowledge.created_at,
|
|
1318
|
+
"updated_at": int(time.time()),
|
|
1319
|
+
"agent_id": cultural_knowledge.agent_id,
|
|
1320
|
+
"team_id": cultural_knowledge.team_id,
|
|
1321
|
+
}
|
|
1322
|
+
|
|
1323
|
+
# Find existing item to update
|
|
1324
|
+
item_updated = False
|
|
1325
|
+
for i, existing_item in enumerate(cultural_knowledge_list):
|
|
1326
|
+
if existing_item.get("id") == cultural_knowledge.id:
|
|
1327
|
+
cultural_knowledge_list[i] = cultural_knowledge_dict
|
|
1328
|
+
item_updated = True
|
|
1329
|
+
break
|
|
1330
|
+
|
|
1331
|
+
if not item_updated:
|
|
1332
|
+
cultural_knowledge_list.append(cultural_knowledge_dict)
|
|
1333
|
+
|
|
1334
|
+
self._write_json_file(self.culture_table_name, cultural_knowledge_list)
|
|
1335
|
+
|
|
1336
|
+
if not deserialize:
|
|
1337
|
+
return cultural_knowledge_dict
|
|
1338
|
+
|
|
1339
|
+
return deserialize_cultural_knowledge_from_db(cultural_knowledge_dict)
|
|
1340
|
+
|
|
1341
|
+
except Exception as e:
|
|
1342
|
+
log_warning(f"Error upserting cultural knowledge: {e}")
|
|
1343
|
+
raise e
|