agno 2.2.13__py3-none-any.whl → 2.4.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/__init__.py +6 -0
- agno/agent/agent.py +5252 -3145
- agno/agent/remote.py +525 -0
- agno/api/api.py +2 -0
- agno/client/__init__.py +3 -0
- agno/client/a2a/__init__.py +10 -0
- agno/client/a2a/client.py +554 -0
- agno/client/a2a/schemas.py +112 -0
- agno/client/a2a/utils.py +369 -0
- agno/client/os.py +2669 -0
- agno/compression/__init__.py +3 -0
- agno/compression/manager.py +247 -0
- agno/culture/manager.py +2 -2
- agno/db/base.py +927 -6
- agno/db/dynamo/dynamo.py +788 -2
- agno/db/dynamo/schemas.py +128 -0
- agno/db/dynamo/utils.py +26 -3
- agno/db/firestore/firestore.py +674 -50
- agno/db/firestore/schemas.py +41 -0
- agno/db/firestore/utils.py +25 -10
- agno/db/gcs_json/gcs_json_db.py +506 -3
- agno/db/gcs_json/utils.py +14 -2
- agno/db/in_memory/in_memory_db.py +203 -4
- agno/db/in_memory/utils.py +14 -2
- agno/db/json/json_db.py +498 -2
- agno/db/json/utils.py +14 -2
- agno/db/migrations/manager.py +199 -0
- agno/db/migrations/utils.py +19 -0
- agno/db/migrations/v1_to_v2.py +54 -16
- agno/db/migrations/versions/__init__.py +0 -0
- agno/db/migrations/versions/v2_3_0.py +977 -0
- agno/db/mongo/async_mongo.py +1013 -39
- agno/db/mongo/mongo.py +684 -4
- agno/db/mongo/schemas.py +48 -0
- agno/db/mongo/utils.py +17 -0
- agno/db/mysql/__init__.py +2 -1
- agno/db/mysql/async_mysql.py +2958 -0
- agno/db/mysql/mysql.py +722 -53
- agno/db/mysql/schemas.py +77 -11
- agno/db/mysql/utils.py +151 -8
- agno/db/postgres/async_postgres.py +1254 -137
- agno/db/postgres/postgres.py +2316 -93
- agno/db/postgres/schemas.py +153 -21
- agno/db/postgres/utils.py +22 -7
- agno/db/redis/redis.py +531 -3
- agno/db/redis/schemas.py +36 -0
- agno/db/redis/utils.py +31 -15
- agno/db/schemas/evals.py +1 -0
- agno/db/schemas/memory.py +20 -9
- agno/db/singlestore/schemas.py +70 -1
- agno/db/singlestore/singlestore.py +737 -74
- agno/db/singlestore/utils.py +13 -3
- agno/db/sqlite/async_sqlite.py +1069 -89
- agno/db/sqlite/schemas.py +133 -1
- agno/db/sqlite/sqlite.py +2203 -165
- agno/db/sqlite/utils.py +21 -11
- agno/db/surrealdb/models.py +25 -0
- agno/db/surrealdb/surrealdb.py +603 -1
- agno/db/utils.py +60 -0
- agno/eval/__init__.py +26 -3
- agno/eval/accuracy.py +25 -12
- agno/eval/agent_as_judge.py +871 -0
- agno/eval/base.py +29 -0
- agno/eval/performance.py +10 -4
- agno/eval/reliability.py +22 -13
- agno/eval/utils.py +2 -1
- agno/exceptions.py +42 -0
- agno/hooks/__init__.py +3 -0
- agno/hooks/decorator.py +164 -0
- agno/integrations/discord/client.py +13 -2
- agno/knowledge/__init__.py +4 -0
- agno/knowledge/chunking/code.py +90 -0
- agno/knowledge/chunking/document.py +65 -4
- agno/knowledge/chunking/fixed.py +4 -1
- agno/knowledge/chunking/markdown.py +102 -11
- agno/knowledge/chunking/recursive.py +2 -2
- agno/knowledge/chunking/semantic.py +130 -48
- agno/knowledge/chunking/strategy.py +18 -0
- agno/knowledge/embedder/azure_openai.py +0 -1
- agno/knowledge/embedder/google.py +1 -1
- agno/knowledge/embedder/mistral.py +1 -1
- agno/knowledge/embedder/nebius.py +1 -1
- agno/knowledge/embedder/openai.py +16 -12
- agno/knowledge/filesystem.py +412 -0
- agno/knowledge/knowledge.py +4261 -1199
- agno/knowledge/protocol.py +134 -0
- agno/knowledge/reader/arxiv_reader.py +3 -2
- agno/knowledge/reader/base.py +9 -7
- agno/knowledge/reader/csv_reader.py +91 -42
- agno/knowledge/reader/docx_reader.py +9 -10
- agno/knowledge/reader/excel_reader.py +225 -0
- agno/knowledge/reader/field_labeled_csv_reader.py +38 -48
- agno/knowledge/reader/firecrawl_reader.py +3 -2
- agno/knowledge/reader/json_reader.py +16 -22
- agno/knowledge/reader/markdown_reader.py +15 -14
- agno/knowledge/reader/pdf_reader.py +33 -28
- agno/knowledge/reader/pptx_reader.py +9 -10
- agno/knowledge/reader/reader_factory.py +135 -1
- agno/knowledge/reader/s3_reader.py +8 -16
- agno/knowledge/reader/tavily_reader.py +3 -3
- agno/knowledge/reader/text_reader.py +15 -14
- agno/knowledge/reader/utils/__init__.py +17 -0
- agno/knowledge/reader/utils/spreadsheet.py +114 -0
- agno/knowledge/reader/web_search_reader.py +8 -65
- agno/knowledge/reader/website_reader.py +16 -13
- agno/knowledge/reader/wikipedia_reader.py +36 -3
- agno/knowledge/reader/youtube_reader.py +3 -2
- agno/knowledge/remote_content/__init__.py +33 -0
- agno/knowledge/remote_content/config.py +266 -0
- agno/knowledge/remote_content/remote_content.py +105 -17
- agno/knowledge/utils.py +76 -22
- agno/learn/__init__.py +71 -0
- agno/learn/config.py +463 -0
- agno/learn/curate.py +185 -0
- agno/learn/machine.py +725 -0
- agno/learn/schemas.py +1114 -0
- agno/learn/stores/__init__.py +38 -0
- agno/learn/stores/decision_log.py +1156 -0
- agno/learn/stores/entity_memory.py +3275 -0
- agno/learn/stores/learned_knowledge.py +1583 -0
- agno/learn/stores/protocol.py +117 -0
- agno/learn/stores/session_context.py +1217 -0
- agno/learn/stores/user_memory.py +1495 -0
- agno/learn/stores/user_profile.py +1220 -0
- agno/learn/utils.py +209 -0
- agno/media.py +22 -6
- agno/memory/__init__.py +14 -1
- agno/memory/manager.py +223 -8
- agno/memory/strategies/__init__.py +15 -0
- agno/memory/strategies/base.py +66 -0
- agno/memory/strategies/summarize.py +196 -0
- agno/memory/strategies/types.py +37 -0
- agno/models/aimlapi/aimlapi.py +17 -0
- agno/models/anthropic/claude.py +434 -59
- agno/models/aws/bedrock.py +121 -20
- agno/models/aws/claude.py +131 -274
- agno/models/azure/ai_foundry.py +10 -6
- agno/models/azure/openai_chat.py +33 -10
- agno/models/base.py +1162 -561
- agno/models/cerebras/cerebras.py +120 -24
- agno/models/cerebras/cerebras_openai.py +21 -2
- agno/models/cohere/chat.py +65 -6
- agno/models/cometapi/cometapi.py +18 -1
- agno/models/dashscope/dashscope.py +2 -3
- agno/models/deepinfra/deepinfra.py +18 -1
- agno/models/deepseek/deepseek.py +69 -3
- agno/models/fireworks/fireworks.py +18 -1
- agno/models/google/gemini.py +959 -89
- agno/models/google/utils.py +22 -0
- agno/models/groq/groq.py +48 -18
- agno/models/huggingface/huggingface.py +17 -6
- agno/models/ibm/watsonx.py +16 -6
- agno/models/internlm/internlm.py +18 -1
- agno/models/langdb/langdb.py +13 -1
- agno/models/litellm/chat.py +88 -9
- agno/models/litellm/litellm_openai.py +18 -1
- agno/models/message.py +24 -5
- agno/models/meta/llama.py +40 -13
- agno/models/meta/llama_openai.py +22 -21
- agno/models/metrics.py +12 -0
- agno/models/mistral/mistral.py +8 -4
- agno/models/n1n/__init__.py +3 -0
- agno/models/n1n/n1n.py +57 -0
- agno/models/nebius/nebius.py +6 -7
- agno/models/nvidia/nvidia.py +20 -3
- agno/models/ollama/__init__.py +2 -0
- agno/models/ollama/chat.py +17 -6
- agno/models/ollama/responses.py +100 -0
- agno/models/openai/__init__.py +2 -0
- agno/models/openai/chat.py +117 -26
- agno/models/openai/open_responses.py +46 -0
- agno/models/openai/responses.py +110 -32
- agno/models/openrouter/__init__.py +2 -0
- agno/models/openrouter/openrouter.py +67 -2
- agno/models/openrouter/responses.py +146 -0
- agno/models/perplexity/perplexity.py +19 -1
- agno/models/portkey/portkey.py +7 -6
- agno/models/requesty/requesty.py +19 -2
- agno/models/response.py +20 -2
- agno/models/sambanova/sambanova.py +20 -3
- agno/models/siliconflow/siliconflow.py +19 -2
- agno/models/together/together.py +20 -3
- agno/models/vercel/v0.py +20 -3
- agno/models/vertexai/claude.py +124 -4
- agno/models/vllm/vllm.py +19 -14
- agno/models/xai/xai.py +19 -2
- agno/os/app.py +467 -137
- agno/os/auth.py +253 -5
- agno/os/config.py +22 -0
- agno/os/interfaces/a2a/a2a.py +7 -6
- agno/os/interfaces/a2a/router.py +635 -26
- agno/os/interfaces/a2a/utils.py +32 -33
- agno/os/interfaces/agui/agui.py +5 -3
- agno/os/interfaces/agui/router.py +26 -16
- agno/os/interfaces/agui/utils.py +97 -57
- agno/os/interfaces/base.py +7 -7
- agno/os/interfaces/slack/router.py +16 -7
- agno/os/interfaces/slack/slack.py +7 -7
- agno/os/interfaces/whatsapp/router.py +35 -7
- agno/os/interfaces/whatsapp/security.py +3 -1
- agno/os/interfaces/whatsapp/whatsapp.py +11 -8
- agno/os/managers.py +326 -0
- agno/os/mcp.py +652 -79
- agno/os/middleware/__init__.py +4 -0
- agno/os/middleware/jwt.py +718 -115
- agno/os/middleware/trailing_slash.py +27 -0
- agno/os/router.py +105 -1558
- agno/os/routers/agents/__init__.py +3 -0
- agno/os/routers/agents/router.py +655 -0
- agno/os/routers/agents/schema.py +288 -0
- agno/os/routers/components/__init__.py +3 -0
- agno/os/routers/components/components.py +475 -0
- agno/os/routers/database.py +155 -0
- agno/os/routers/evals/evals.py +111 -18
- agno/os/routers/evals/schemas.py +38 -5
- agno/os/routers/evals/utils.py +80 -11
- agno/os/routers/health.py +3 -3
- agno/os/routers/knowledge/knowledge.py +284 -35
- agno/os/routers/knowledge/schemas.py +14 -2
- agno/os/routers/memory/memory.py +274 -11
- agno/os/routers/memory/schemas.py +44 -3
- agno/os/routers/metrics/metrics.py +30 -15
- agno/os/routers/metrics/schemas.py +10 -6
- agno/os/routers/registry/__init__.py +3 -0
- agno/os/routers/registry/registry.py +337 -0
- agno/os/routers/session/session.py +143 -14
- agno/os/routers/teams/__init__.py +3 -0
- agno/os/routers/teams/router.py +550 -0
- agno/os/routers/teams/schema.py +280 -0
- agno/os/routers/traces/__init__.py +3 -0
- agno/os/routers/traces/schemas.py +414 -0
- agno/os/routers/traces/traces.py +549 -0
- agno/os/routers/workflows/__init__.py +3 -0
- agno/os/routers/workflows/router.py +757 -0
- agno/os/routers/workflows/schema.py +139 -0
- agno/os/schema.py +157 -584
- agno/os/scopes.py +469 -0
- agno/os/settings.py +3 -0
- agno/os/utils.py +574 -185
- agno/reasoning/anthropic.py +85 -1
- agno/reasoning/azure_ai_foundry.py +93 -1
- agno/reasoning/deepseek.py +102 -2
- agno/reasoning/default.py +6 -7
- agno/reasoning/gemini.py +87 -3
- agno/reasoning/groq.py +109 -2
- agno/reasoning/helpers.py +6 -7
- agno/reasoning/manager.py +1238 -0
- agno/reasoning/ollama.py +93 -1
- agno/reasoning/openai.py +115 -1
- agno/reasoning/vertexai.py +85 -1
- agno/registry/__init__.py +3 -0
- agno/registry/registry.py +68 -0
- agno/remote/__init__.py +3 -0
- agno/remote/base.py +581 -0
- agno/run/__init__.py +2 -4
- agno/run/agent.py +134 -19
- agno/run/base.py +49 -1
- agno/run/cancel.py +65 -52
- agno/run/cancellation_management/__init__.py +9 -0
- agno/run/cancellation_management/base.py +78 -0
- agno/run/cancellation_management/in_memory_cancellation_manager.py +100 -0
- agno/run/cancellation_management/redis_cancellation_manager.py +236 -0
- agno/run/requirement.py +181 -0
- agno/run/team.py +111 -19
- agno/run/workflow.py +2 -1
- agno/session/agent.py +57 -92
- agno/session/summary.py +1 -1
- agno/session/team.py +62 -115
- agno/session/workflow.py +353 -57
- agno/skills/__init__.py +17 -0
- agno/skills/agent_skills.py +377 -0
- agno/skills/errors.py +32 -0
- agno/skills/loaders/__init__.py +4 -0
- agno/skills/loaders/base.py +27 -0
- agno/skills/loaders/local.py +216 -0
- agno/skills/skill.py +65 -0
- agno/skills/utils.py +107 -0
- agno/skills/validator.py +277 -0
- agno/table.py +10 -0
- agno/team/__init__.py +5 -1
- agno/team/remote.py +447 -0
- agno/team/team.py +3769 -2202
- agno/tools/brandfetch.py +27 -18
- agno/tools/browserbase.py +225 -16
- agno/tools/crawl4ai.py +3 -0
- agno/tools/duckduckgo.py +25 -71
- agno/tools/exa.py +0 -21
- agno/tools/file.py +14 -13
- agno/tools/file_generation.py +12 -6
- agno/tools/firecrawl.py +15 -7
- agno/tools/function.py +94 -113
- agno/tools/google_bigquery.py +11 -2
- agno/tools/google_drive.py +4 -3
- agno/tools/knowledge.py +9 -4
- agno/tools/mcp/mcp.py +301 -18
- agno/tools/mcp/multi_mcp.py +269 -14
- agno/tools/mem0.py +11 -10
- agno/tools/memory.py +47 -46
- agno/tools/mlx_transcribe.py +10 -7
- agno/tools/models/nebius.py +5 -5
- agno/tools/models_labs.py +20 -10
- agno/tools/nano_banana.py +151 -0
- agno/tools/parallel.py +0 -7
- agno/tools/postgres.py +76 -36
- agno/tools/python.py +14 -6
- agno/tools/reasoning.py +30 -23
- agno/tools/redshift.py +406 -0
- agno/tools/shopify.py +1519 -0
- agno/tools/spotify.py +919 -0
- agno/tools/tavily.py +4 -1
- agno/tools/toolkit.py +253 -18
- agno/tools/websearch.py +93 -0
- agno/tools/website.py +1 -1
- agno/tools/wikipedia.py +1 -1
- agno/tools/workflow.py +56 -48
- agno/tools/yfinance.py +12 -11
- agno/tracing/__init__.py +12 -0
- agno/tracing/exporter.py +161 -0
- agno/tracing/schemas.py +276 -0
- agno/tracing/setup.py +112 -0
- agno/utils/agent.py +251 -10
- agno/utils/cryptography.py +22 -0
- agno/utils/dttm.py +33 -0
- agno/utils/events.py +264 -7
- agno/utils/hooks.py +111 -3
- agno/utils/http.py +161 -2
- agno/utils/mcp.py +49 -8
- agno/utils/media.py +22 -1
- agno/utils/models/ai_foundry.py +9 -2
- agno/utils/models/claude.py +20 -5
- agno/utils/models/cohere.py +9 -2
- agno/utils/models/llama.py +9 -2
- agno/utils/models/mistral.py +4 -2
- agno/utils/os.py +0 -0
- agno/utils/print_response/agent.py +99 -16
- agno/utils/print_response/team.py +223 -24
- agno/utils/print_response/workflow.py +0 -2
- agno/utils/prompts.py +8 -6
- agno/utils/remote.py +23 -0
- agno/utils/response.py +1 -13
- agno/utils/string.py +91 -2
- agno/utils/team.py +62 -12
- agno/utils/tokens.py +657 -0
- agno/vectordb/base.py +15 -2
- agno/vectordb/cassandra/cassandra.py +1 -1
- agno/vectordb/chroma/__init__.py +2 -1
- agno/vectordb/chroma/chromadb.py +468 -23
- agno/vectordb/clickhouse/clickhousedb.py +1 -1
- agno/vectordb/couchbase/couchbase.py +6 -2
- agno/vectordb/lancedb/lance_db.py +7 -38
- agno/vectordb/lightrag/lightrag.py +7 -6
- agno/vectordb/milvus/milvus.py +118 -84
- agno/vectordb/mongodb/__init__.py +2 -1
- agno/vectordb/mongodb/mongodb.py +14 -31
- agno/vectordb/pgvector/pgvector.py +120 -66
- agno/vectordb/pineconedb/pineconedb.py +2 -19
- agno/vectordb/qdrant/__init__.py +2 -1
- agno/vectordb/qdrant/qdrant.py +33 -56
- agno/vectordb/redis/__init__.py +2 -1
- agno/vectordb/redis/redisdb.py +19 -31
- agno/vectordb/singlestore/singlestore.py +17 -9
- agno/vectordb/surrealdb/surrealdb.py +2 -38
- agno/vectordb/weaviate/__init__.py +2 -1
- agno/vectordb/weaviate/weaviate.py +7 -3
- agno/workflow/__init__.py +5 -1
- agno/workflow/agent.py +2 -2
- agno/workflow/condition.py +12 -10
- agno/workflow/loop.py +28 -9
- agno/workflow/parallel.py +21 -13
- agno/workflow/remote.py +362 -0
- agno/workflow/router.py +12 -9
- agno/workflow/step.py +261 -36
- agno/workflow/steps.py +12 -8
- agno/workflow/types.py +40 -77
- agno/workflow/workflow.py +939 -213
- {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/METADATA +134 -181
- agno-2.4.3.dist-info/RECORD +677 -0
- {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/WHEEL +1 -1
- agno/tools/googlesearch.py +0 -98
- agno/tools/memori.py +0 -339
- agno-2.2.13.dist-info/RECORD +0 -575
- {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/licenses/LICENSE +0 -0
- {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/top_level.txt +0 -0
--- agno/db/postgres/async_postgres.py (agno 2.2.13)
+++ agno/db/postgres/async_postgres.py (agno 2.4.3)
@@ -1,9 +1,13 @@
 import time
 from datetime import date, datetime, timedelta, timezone
-from typing import Any, Dict, List, Optional, Sequence, Tuple, Union
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Tuple, Union, cast
 from uuid import uuid4
 
-from agno.db.base import AsyncBaseDb, SessionType
+if TYPE_CHECKING:
+    from agno.tracing.schemas import Span, Trace
+
+from agno.db.base import AsyncBaseDb, ComponentType, SessionType
+from agno.db.migrations.manager import MigrationManager
 from agno.db.postgres.schemas import get_table_schema_definition
 from agno.db.postgres.utils import (
     abulk_upsert_metrics,
@@ -23,12 +27,15 @@ from agno.db.schemas.knowledge import KnowledgeRow
 from agno.db.schemas.memory import UserMemory
 from agno.session import AgentSession, Session, TeamSession, WorkflowSession
 from agno.utils.log import log_debug, log_error, log_info, log_warning
+from agno.utils.string import sanitize_postgres_string, sanitize_postgres_strings
 
 try:
-    from sqlalchemy import Index, String, UniqueConstraint, func, update
+    from sqlalchemy import ForeignKey, Index, String, Table, UniqueConstraint, and_, case, func, or_, update
     from sqlalchemy.dialects import postgresql
+    from sqlalchemy.dialects.postgresql import TIMESTAMP
+    from sqlalchemy.exc import ProgrammingError
     from sqlalchemy.ext.asyncio import AsyncEngine, async_sessionmaker, create_async_engine
-    from sqlalchemy.schema import Column, MetaData, Table
+    from sqlalchemy.schema import Column, MetaData
     from sqlalchemy.sql.expression import select, text
 except ImportError:
     raise ImportError("`sqlalchemy` not installed. Please install it using `pip install sqlalchemy`")
@@ -47,7 +54,11 @@ class AsyncPostgresDb(AsyncBaseDb):
         eval_table: Optional[str] = None,
         knowledge_table: Optional[str] = None,
         culture_table: Optional[str] = None,
-        db_id: Optional[str] = None,
+        traces_table: Optional[str] = None,
+        spans_table: Optional[str] = None,
+        versions_table: Optional[str] = None,
+        learnings_table: Optional[str] = None,
+        create_schema: bool = True,
     ):
         """
         Async interface for interacting with a PostgreSQL database.
@@ -57,6 +68,15 @@ class AsyncPostgresDb(AsyncBaseDb):
         2. Use the db_url
         3. Raise an error if neither is provided
 
+        Connection Pool Configuration:
+        When creating an engine from db_url, the following settings are applied:
+        - pool_pre_ping=True: Validates connections before use to handle terminated
+          connections (e.g., "terminating connection due to administrator command")
+        - pool_recycle=3600: Recycles connections after 1 hour to prevent stale connections
+
+        These settings help handle connection terminations gracefully. If you need
+        custom pool settings, provide a pre-configured db_engine instead.
+
         Args:
             id (Optional[str]): The ID of the database.
             db_url (Optional[str]): The database URL to connect to.
@@ -68,38 +88,62 @@ class AsyncPostgresDb(AsyncBaseDb):
             eval_table (Optional[str]): Name of the table to store evaluation runs data.
             knowledge_table (Optional[str]): Name of the table to store knowledge content.
             culture_table (Optional[str]): Name of the table to store cultural knowledge.
-
+            traces_table (Optional[str]): Name of the table to store run traces.
+            spans_table (Optional[str]): Name of the table to store span events.
+            versions_table (Optional[str]): Name of the table to store schema versions.
+            learnings_table (Optional[str]): Name of the table to store learnings.
+            create_schema (bool): Whether to automatically create the database schema if it doesn't exist.
+                Set to False if schema is managed externally (e.g., via migrations). Defaults to True.
 
         Raises:
             ValueError: If neither db_url nor db_engine is provided.
             ValueError: If none of the tables are provided.
         """
-        if db_id is not None:
-            log_warning("db_id is deprecated and will be removed in a future version, use id instead.")
 
         super().__init__(
-            id=id
+            id=id,
             session_table=session_table,
             memory_table=memory_table,
             metrics_table=metrics_table,
             eval_table=eval_table,
             knowledge_table=knowledge_table,
             culture_table=culture_table,
+            traces_table=traces_table,
+            spans_table=spans_table,
+            versions_table=versions_table,
+            learnings_table=learnings_table,
         )
 
        _engine: Optional[AsyncEngine] = db_engine
        if _engine is None and db_url is not None:
-            _engine = create_async_engine(db_url)
+            _engine = create_async_engine(
+                db_url,
+                pool_pre_ping=True,
+                pool_recycle=3600,
+            )
        if _engine is None:
            raise ValueError("One of db_url or db_engine must be provided")
 
        self.db_url: Optional[str] = db_url
        self.db_engine: AsyncEngine = _engine
        self.db_schema: str = db_schema if db_schema is not None else "ai"
-        self.metadata: MetaData = MetaData()
+        self.metadata: MetaData = MetaData(schema=self.db_schema)
+        self.create_schema: bool = create_schema
 
        # Initialize database session factory
-        self.async_session_factory = async_sessionmaker(bind=self.db_engine)
+        self.async_session_factory = async_sessionmaker(
+            bind=self.db_engine,
+            expire_on_commit=False,
+        )
+
+    async def close(self) -> None:
+        """Close database connections and dispose of the connection pool.
+
+        Should be called during application shutdown to properly release
+        all database connections.
+        """
+        if self.db_engine is not None:
+            await self.db_engine.dispose()
 
     # -- DB methods --
     async def table_exists(self, table_name: str) -> bool:
@@ -122,25 +166,31 @@ class AsyncPostgresDb(AsyncBaseDb):
             (self.metrics_table_name, "metrics"),
             (self.eval_table_name, "evals"),
             (self.knowledge_table_name, "knowledge"),
+            (self.versions_table_name, "versions"),
+            (self.learnings_table_name, "learnings"),
         ]
 
         for table_name, table_type in tables_to_create:
-            await self._create_table(table_name=table_name, table_type=table_type, db_schema=self.db_schema)
+            await self._get_or_create_table(
+                table_name=table_name, table_type=table_type, create_table_if_not_found=True
+            )
 
-    async def _create_table(self, table_name: str, table_type: str, db_schema: str) -> Table:
+    async def _create_table(self, table_name: str, table_type: str) -> Table:
         """
         Create a table with the appropriate schema based on the table type.
 
         Args:
             table_name (str): Name of the table to create
             table_type (str): Type of table (used to get schema definition)
-            db_schema (str): Database schema name
 
         Returns:
             Table: SQLAlchemy Table object
         """
         try:
-            table_schema = get_table_schema_definition(table_type)
+            # Pass traces_table_name and db_schema for spans table foreign key resolution
+            table_schema = get_table_schema_definition(
+                table_type, traces_table_name=self.trace_table_name, db_schema=self.db_schema
+            ).copy()
 
             columns: List[Column] = []
             indexes: List[str] = []
@@ -160,11 +210,15 @@ class AsyncPostgresDb(AsyncBaseDb):
                 if col_config.get("unique", False):
                     column_kwargs["unique"] = True
                     unique_constraints.append(col_name)
+
+                # Handle foreign key constraint
+                if "foreign_key" in col_config:
+                    column_args.append(ForeignKey(col_config["foreign_key"]))
+
                 columns.append(Column(*column_args, **column_kwargs))  # type: ignore
 
             # Create the table object
-
-            table = Table(table_name, table_metadata, *columns, schema=db_schema)
+            table = Table(table_name, self.metadata, *columns, schema=self.db_schema)
 
             # Add multi-column unique constraints with table-specific names
             for constraint in schema_unique_constraints:
@@ -177,12 +231,19 @@ class AsyncPostgresDb(AsyncBaseDb):
                 idx_name = f"idx_{table_name}_{idx_col}"
                 table.append_constraint(Index(idx_name, idx_col))
 
-
-
+            if self.create_schema:
+                async with self.async_session_factory() as sess, sess.begin():
+                    await acreate_schema(session=sess, db_schema=self.db_schema)
 
             # Create table
-
-
+            table_created = False
+            if not await self.table_exists(table_name):
+                async with self.db_engine.begin() as conn:
+                    await conn.run_sync(table.create, checkfirst=True)
+                log_debug(f"Successfully created table '{table_name}'")
+                table_created = True
+            else:
+                log_debug(f"Table '{self.db_schema}.{table_name}' already exists, skipping creation")
 
             # Create indexes
             for idx in table.indexes:
@@ -192,111 +253,205 @@ class AsyncPostgresDb(AsyncBaseDb):
                     exists_query = text(
                         "SELECT 1 FROM pg_indexes WHERE schemaname = :schema AND indexname = :index_name"
                     )
-                    result = await sess.execute(exists_query, {"schema": db_schema, "index_name": idx.name})
+                    result = await sess.execute(exists_query, {"schema": self.db_schema, "index_name": idx.name})
                     exists = result.scalar() is not None
                     if exists:
-                        log_debug(f"Index {idx.name} already exists in {db_schema}.{table_name}, skipping creation")
+                        log_debug(
+                            f"Index {idx.name} already exists in {self.db_schema}.{table_name}, skipping creation"
+                        )
                         continue
 
                    async with self.db_engine.begin() as conn:
                        await conn.run_sync(idx.create)
-                        log_debug(f"Created index: {idx.name} for table {db_schema}.{table_name}")
+                        log_debug(f"Created index: {idx.name} for table {self.db_schema}.{table_name}")
 
                 except Exception as e:
                     log_error(f"Error creating index {idx.name}: {e}")
 
-
+            # Store the schema version for the created table
+            if table_name != self.versions_table_name and table_created:
+                # Also store the schema version for the created table
+                latest_schema_version = MigrationManager(self).latest_schema_version
+                await self.upsert_schema_version(table_name=table_name, version=latest_schema_version.public)
+                log_info(
+                    f"Successfully stored version {latest_schema_version.public} in database for table {table_name}"
+                )
+
             return table
 
         except Exception as e:
-            log_error(f"Could not create table {db_schema}.{table_name}: {e}")
+            log_error(f"Could not create table {self.db_schema}.{table_name}: {e}")
             raise
 
-    async def _get_table(self, table_type: str) -> Table:
+    async def _get_table(self, table_type: str, create_table_if_not_found: Optional[bool] = False) -> Table:
         if table_type == "sessions":
-
-
-
-
+            self.session_table = await self._get_or_create_table(
+                table_name=self.session_table_name,
+                table_type="sessions",
+                create_table_if_not_found=create_table_if_not_found,
+            )
             return self.session_table
 
         if table_type == "memories":
-
-
-
-
+            self.memory_table = await self._get_or_create_table(
+                table_name=self.memory_table_name,
+                table_type="memories",
+                create_table_if_not_found=create_table_if_not_found,
+            )
             return self.memory_table
 
         if table_type == "metrics":
-
-
-
-
+            self.metrics_table = await self._get_or_create_table(
+                table_name=self.metrics_table_name,
+                table_type="metrics",
+                create_table_if_not_found=create_table_if_not_found,
+            )
             return self.metrics_table
 
         if table_type == "evals":
-
-
-
-
+            self.eval_table = await self._get_or_create_table(
+                table_name=self.eval_table_name,
+                table_type="evals",
+                create_table_if_not_found=create_table_if_not_found,
+            )
             return self.eval_table
 
         if table_type == "knowledge":
-
-
-
-
+            self.knowledge_table = await self._get_or_create_table(
+                table_name=self.knowledge_table_name,
+                table_type="knowledge",
+                create_table_if_not_found=create_table_if_not_found,
+            )
             return self.knowledge_table
 
         if table_type == "culture":
-
-
-
-
+            self.culture_table = await self._get_or_create_table(
+                table_name=self.culture_table_name,
+                table_type="culture",
+                create_table_if_not_found=create_table_if_not_found,
+            )
             return self.culture_table
 
+        if table_type == "versions":
+            self.versions_table = await self._get_or_create_table(
+                table_name=self.versions_table_name,
+                table_type="versions",
+                create_table_if_not_found=create_table_if_not_found,
+            )
+            return self.versions_table
+
+        if table_type == "traces":
+            self.traces_table = await self._get_or_create_table(
+                table_name=self.trace_table_name,
+                table_type="traces",
+                create_table_if_not_found=create_table_if_not_found,
+            )
+            return self.traces_table
+
+        if table_type == "spans":
+            # Ensure traces table exists first (spans has FK to traces)
+            if create_table_if_not_found:
+                await self._get_table(table_type="traces", create_table_if_not_found=True)
+            self.spans_table = await self._get_or_create_table(
+                table_name=self.span_table_name,
+                table_type="spans",
+                create_table_if_not_found=create_table_if_not_found,
+            )
+            return self.spans_table
+
+        if table_type == "learnings":
+            self.learnings_table = await self._get_or_create_table(
+                table_name=self.learnings_table_name,
+                table_type="learnings",
+                create_table_if_not_found=create_table_if_not_found,
+            )
+            return self.learnings_table
+
         raise ValueError(f"Unknown table type: {table_type}")
 
-    async def _get_or_create_table(self, table_name: str, table_type: str, db_schema: str) -> Table:
+    async def _get_or_create_table(
+        self, table_name: str, table_type: str, create_table_if_not_found: Optional[bool] = False
+    ) -> Table:
         """
         Check if the table exists and is valid, else create it.
 
         Args:
             table_name (str): Name of the table to get or create
             table_type (str): Type of table (used to get schema definition)
-            db_schema (str): Database schema name
 
         Returns:
             Table: SQLAlchemy Table object representing the schema.
         """
 
         async with self.async_session_factory() as sess, sess.begin():
-            table_is_available = await ais_table_available(session=sess, table_name=table_name, db_schema=db_schema)
+            table_is_available = await ais_table_available(
+                session=sess, table_name=table_name, db_schema=self.db_schema
+            )
 
-            if not table_is_available:
-                return await self._create_table(table_name=table_name, table_type=table_type, db_schema=db_schema)
+            if (not table_is_available) and create_table_if_not_found:
+                return await self._create_table(table_name=table_name, table_type=table_type)
 
         if not await ais_valid_table(
             db_engine=self.db_engine,
             table_name=table_name,
             table_type=table_type,
-            db_schema=db_schema,
+            db_schema=self.db_schema,
         ):
-            raise ValueError(f"Table {db_schema}.{table_name} has an invalid schema")
+            raise ValueError(f"Table {self.db_schema}.{table_name} has an invalid schema")
 
         try:
             async with self.db_engine.connect() as conn:
 
                 def create_table(connection):
-                    return Table(table_name, self.metadata, schema=db_schema, autoload_with=connection)
+                    return Table(table_name, self.metadata, schema=self.db_schema, autoload_with=connection)
 
                 table = await conn.run_sync(create_table)
+
                 return table
 
         except Exception as e:
-            log_error(f"Error loading existing table {db_schema}.{table_name}: {e}")
+            log_error(f"Error loading existing table {self.db_schema}.{table_name}: {e}")
             raise
 
+    async def get_latest_schema_version(self, table_name: str) -> str:
+        """Get the latest version of the database schema."""
+        table = await self._get_table(table_type="versions", create_table_if_not_found=True)
+        if table is None:
+            return "2.0.0"
+
+        async with self.async_session_factory() as sess:
+            stmt = select(table)
+            # Latest version for the given table
+            stmt = stmt.where(table.c.table_name == table_name)
+            stmt = stmt.order_by(table.c.version.desc()).limit(1)
+            result = await sess.execute(stmt)
+            row = result.fetchone()
+            if row is None:
+                return "2.0.0"
+
+            version_dict = dict(row._mapping)
+            return version_dict.get("version") or "2.0.0"
+
+    async def upsert_schema_version(self, table_name: str, version: str) -> None:
+        """Upsert the schema version into the database."""
+        table = await self._get_table(table_type="versions", create_table_if_not_found=True)
+        if table is None:
+            return
+        current_datetime = datetime.now().isoformat()
+        async with self.async_session_factory() as sess, sess.begin():
+            stmt = postgresql.insert(table).values(
+                table_name=table_name,
+                version=version,
+                created_at=current_datetime,  # Store as ISO format string
+                updated_at=current_datetime,
+            )
+            # Update version if table_name already exists
+            stmt = stmt.on_conflict_do_update(
+                index_elements=["table_name"],
+                set_=dict(version=version, updated_at=current_datetime),
+            )
+            await sess.execute(stmt)
+
     # -- Session methods --
     async def delete_session(self, session_id: str) -> bool:
         """
@@ -384,6 +539,11 @@ class AsyncPostgresDb(AsyncBaseDb):
 
             if user_id is not None:
                 stmt = stmt.where(table.c.user_id == user_id)
+
+            # Filter by session_type to ensure we get the correct session type
+            session_type_value = session_type.value if isinstance(session_type, SessionType) else session_type
+            stmt = stmt.where(table.c.session_type == session_type_value)
+
             result = await sess.execute(stmt)
             row = result.fetchone()
             if row is None:
@@ -466,9 +626,7 @@ class AsyncPostgresDb(AsyncBaseDb):
                 stmt = stmt.where(table.c.created_at <= end_timestamp)
             if session_name is not None:
                 stmt = stmt.where(
-                    func.coalesce(table.c.session_data["session_name"].astext, "").ilike(
-                        f"%{session_name}%"
-                    )
+                    func.coalesce(table.c.session_data["session_name"].astext, "").ilike(f"%{session_name}%")
                 )
             if session_type is not None:
                 session_type_value = session_type.value if isinstance(session_type, SessionType) else session_type
@@ -532,6 +690,8 @@ class AsyncPostgresDb(AsyncBaseDb):
             table = await self._get_table(table_type="sessions")
 
             async with self.async_session_factory() as sess, sess.begin():
+                # Sanitize session_name to remove null bytes
+                sanitized_session_name = sanitize_postgres_string(session_name)
                 stmt = (
                     update(table)
                     .where(table.c.session_id == session_id)
@@ -541,7 +701,7 @@ class AsyncPostgresDb(AsyncBaseDb):
                         func.jsonb_set(
                             func.cast(table.c.session_data, postgresql.JSONB),
                             text("'{session_name}'"),
-                            func.to_jsonb(session_name),
+                            func.to_jsonb(sanitized_session_name),
                         ),
                         postgresql.JSON,
                     )
@@ -592,8 +752,23 @@ class AsyncPostgresDb(AsyncBaseDb):
             Exception: If an error occurs during upsert.
         """
         try:
-            table = await self._get_table(table_type="sessions")
+            table = await self._get_table(table_type="sessions", create_table_if_not_found=True)
             session_dict = session.to_dict()
+            # Sanitize JSON/dict fields to remove null bytes from nested strings
+            if session_dict.get("agent_data"):
+                session_dict["agent_data"] = sanitize_postgres_strings(session_dict["agent_data"])
+            if session_dict.get("team_data"):
+                session_dict["team_data"] = sanitize_postgres_strings(session_dict["team_data"])
+            if session_dict.get("workflow_data"):
+                session_dict["workflow_data"] = sanitize_postgres_strings(session_dict["workflow_data"])
+            if session_dict.get("session_data"):
+                session_dict["session_data"] = sanitize_postgres_strings(session_dict["session_data"])
+            if session_dict.get("summary"):
+                session_dict["summary"] = sanitize_postgres_strings(session_dict["summary"])
+            if session_dict.get("metadata"):
+                session_dict["metadata"] = sanitize_postgres_strings(session_dict["metadata"])
+            if session_dict.get("runs"):
+                session_dict["runs"] = sanitize_postgres_strings(session_dict["runs"])
 
             if isinstance(session, AgentSession):
                 async with self.async_session_factory() as sess, sess.begin():
@@ -723,7 +898,7 @@ class AsyncPostgresDb(AsyncBaseDb):
             return None
 
     # -- Memory methods --
-    async def delete_user_memory(self, memory_id: str):
+    async def delete_user_memory(self, memory_id: str, user_id: Optional[str] = None):
         """Delete a user memory from the database.
 
         Returns:
@@ -737,6 +912,8 @@ class AsyncPostgresDb(AsyncBaseDb):
 
             async with self.async_session_factory() as sess, sess.begin():
                 delete_stmt = table.delete().where(table.c.memory_id == memory_id)
+                if user_id is not None:
+                    delete_stmt = delete_stmt.where(table.c.user_id == user_id)
                 result = await sess.execute(delete_stmt)
 
                 success = result.rowcount > 0  # type: ignore
@@ -748,7 +925,7 @@ class AsyncPostgresDb(AsyncBaseDb):
         except Exception as e:
             log_error(f"Error deleting user memory: {e}")
 
-    async def delete_user_memories(self, memory_ids: List[str]) -> None:
+    async def delete_user_memories(self, memory_ids: List[str], user_id: Optional[str] = None) -> None:
         """Delete user memories from the database.
 
         Args:
@@ -762,6 +939,10 @@ class AsyncPostgresDb(AsyncBaseDb):
 
             async with self.async_session_factory() as sess, sess.begin():
                 delete_stmt = table.delete().where(table.c.memory_id.in_(memory_ids))
+
+                if user_id is not None:
+                    delete_stmt = delete_stmt.where(table.c.user_id == user_id)
+
                 result = await sess.execute(delete_stmt)
 
                 if result.rowcount == 0:  # type: ignore
@@ -772,9 +953,12 @@ class AsyncPostgresDb(AsyncBaseDb):
         except Exception as e:
             log_error(f"Error deleting user memories: {e}")
 
-    async def get_all_memory_topics(self) -> List[str]:
+    async def get_all_memory_topics(self, user_id: Optional[str] = None) -> List[str]:
         """Get all memory topics from the database.
 
+        Args:
+            user_id (Optional[str]): The ID of the user to filter by.
+
         Returns:
             List[str]: List of memory topics.
         """
@@ -782,24 +966,57 @@ class AsyncPostgresDb(AsyncBaseDb):
             table = await self._get_table(table_type="memories")
 
             async with self.async_session_factory() as sess, sess.begin():
-
-
-
+                # Filter out NULL topics and ensure topics is an array before extracting elements
+                # jsonb_typeof returns 'array' for JSONB arrays
+                conditions = [
+                    table.c.topics.is_not(None),
+                    func.jsonb_typeof(table.c.topics) == "array",
+                ]
+                if user_id is not None:
+                    conditions.append(table.c.user_id == user_id)
+
+                try:
+                    # jsonb_array_elements_text is a set-returning function that must be used with select_from
+                    stmt = select(func.jsonb_array_elements_text(table.c.topics).label("topic"))
+                    stmt = stmt.select_from(table)
+                    stmt = stmt.where(and_(*conditions))
+                    result = await sess.execute(stmt)
+                except ProgrammingError:
+                    # Retrying with json_array_elements_text. This works in older versions,
+                    # where the topics column was of type JSON instead of JSONB
+                    # For JSON (not JSONB), we use json_typeof
+                    json_conditions = [
+                        table.c.topics.is_not(None),
+                        func.json_typeof(table.c.topics) == "array",
+                    ]
+                    if user_id is not None:
+                        json_conditions.append(table.c.user_id == user_id)
+                    stmt = select(func.json_array_elements_text(table.c.topics).label("topic"))
+                    stmt = stmt.select_from(table)
+                    stmt = stmt.where(and_(*json_conditions))
+                    result = await sess.execute(stmt)
 
-
+                records = result.fetchall()
+                # Extract topics from records - each record is a Row with a 'topic' attribute
+                topics = [record.topic for record in records if record.topic is not None]
+                return list(set(topics))
 
         except Exception as e:
             log_error(f"Exception reading from memory table: {e}")
             return []
 
     async def get_user_memory(
-        self, memory_id: str, deserialize: Optional[bool] = True
+        self,
+        memory_id: str,
+        deserialize: Optional[bool] = True,
+        user_id: Optional[str] = None,
     ) -> Optional[Union[UserMemory, Dict[str, Any]]]:
         """Get a memory from the database.
 
         Args:
             memory_id (str): The ID of the memory to get.
             deserialize (Optional[bool]): Whether to serialize the memory. Defaults to True.
+            user_id (Optional[str]): The ID of the user to filter by.
 
         Returns:
             Union[UserMemory, Dict[str, Any], None]:
@@ -814,6 +1031,8 @@ class AsyncPostgresDb(AsyncBaseDb):
 
             async with self.async_session_factory() as sess, sess.begin():
                 stmt = select(table).where(table.c.memory_id == memory_id)
+                if user_id is not None:
+                    stmt = stmt.where(table.c.user_id == user_id)
 
                 result = await sess.execute(stmt)
                 row = result.fetchone()
@@ -940,7 +1159,7 @@ class AsyncPostgresDb(AsyncBaseDb):
                 await sess.execute(table.delete())
 
         except Exception as e:
-
+            log_error(f"Exception deleting all cultural knowledge: {e}")
 
     async def delete_cultural_knowledge(self, id: str) -> None:
         """Delete cultural knowledge by ID.
@@ -959,8 +1178,7 @@ class AsyncPostgresDb(AsyncBaseDb):
                 await sess.execute(stmt)
 
         except Exception as e:
-
-            raise e
+            log_error(f"Exception deleting cultural knowledge: {e}")
 
     async def get_cultural_knowledge(
         self, id: str, deserialize: Optional[bool] = True
@@ -996,8 +1214,8 @@ class AsyncPostgresDb(AsyncBaseDb):
                 return deserialize_cultural_knowledge(db_row)
 
         except Exception as e:
-
-
+            log_error(f"Exception reading cultural knowledge: {e}")
+            return None
 
     async def get_all_cultural_knowledge(
         self,
@@ -1031,7 +1249,7 @@ class AsyncPostgresDb(AsyncBaseDb):
             Exception: If an error occurs during retrieval.
         """
         try:
-            table = await self._get_table(table_type="culture")
+            table = await self._get_table(table_type="culture", create_table_if_not_found=True)
 
             async with self.async_session_factory() as sess:
                 # Build query with filters
@@ -1069,8 +1287,8 @@ class AsyncPostgresDb(AsyncBaseDb):
                 return [deserialize_cultural_knowledge(row) for row in db_rows]
 
         except Exception as e:
-
-
+            log_error(f"Exception reading all cultural knowledge: {e}")
+            return [] if deserialize else ([], 0)
 
     async def upsert_cultural_knowledge(
         self, cultural_knowledge: CulturalKnowledge, deserialize: Optional[bool] = True
@@ -1088,7 +1306,7 @@ class AsyncPostgresDb(AsyncBaseDb):
             Exception: If an error occurs during upsert.
         """
         try:
-            table = await self._get_table(table_type="culture")
+            table = await self._get_table(table_type="culture", create_table_if_not_found=True)
 
             # Generate ID if not present
             if cultural_knowledge.id is None:
@@ -1096,16 +1314,26 @@ class AsyncPostgresDb(AsyncBaseDb):
 
             # Serialize content, categories, and notes into a JSON dict for DB storage
             content_dict = serialize_cultural_knowledge(cultural_knowledge)
+            # Sanitize content_dict to remove null bytes from nested strings
+            if content_dict:
+                content_dict = cast(Dict[str, Any], sanitize_postgres_strings(content_dict))
+
+            # Sanitize string fields to remove null bytes (PostgreSQL doesn't allow them)
+            sanitized_name = sanitize_postgres_string(cultural_knowledge.name)
+            sanitized_summary = sanitize_postgres_string(cultural_knowledge.summary)
+            sanitized_input = sanitize_postgres_string(cultural_knowledge.input)
 
             async with self.async_session_factory() as sess, sess.begin():
                 # Use PostgreSQL-specific insert with on_conflict_do_update
                 insert_stmt = postgresql.insert(table).values(
                     id=cultural_knowledge.id,
-                    name=cultural_knowledge.name,
-                    summary=cultural_knowledge.summary,
+                    name=sanitized_name,
+                    summary=sanitized_summary,
                     content=content_dict if content_dict else None,
-                    metadata=cultural_knowledge.metadata if cultural_knowledge.metadata else None,
-                    input=cultural_knowledge.input,
+                    metadata=sanitize_postgres_strings(cultural_knowledge.metadata)
+                    if cultural_knowledge.metadata
+                    else None,
+                    input=sanitized_input,
                     created_at=cultural_knowledge.created_at,
                     updated_at=int(time.time()),
                     agent_id=cultural_knowledge.agent_id,
@@ -1114,11 +1342,13 @@ class AsyncPostgresDb(AsyncBaseDb):
 
                 # Update all fields except id on conflict
                 update_dict = {
-                    "name": cultural_knowledge.name,
-                    "summary": cultural_knowledge.summary,
+                    "name": sanitized_name,
+                    "summary": sanitized_summary,
                     "content": content_dict if content_dict else None,
-                    "metadata": cultural_knowledge.metadata if cultural_knowledge.metadata else None,
-                    "input": cultural_knowledge.input,
+                    "metadata": sanitize_postgres_strings(cultural_knowledge.metadata)
+                    if cultural_knowledge.metadata
+                    else None,
+                    "input": sanitized_input,
                     "updated_at": int(time.time()),
                     "agent_id": cultural_knowledge.agent_id,
                     "team_id": cultural_knowledge.team_id,
@@ -1146,13 +1376,14 @@ class AsyncPostgresDb(AsyncBaseDb):
             raise e
 
     async def get_user_memory_stats(
-        self, limit: Optional[int] = None, page: Optional[int] = None
+        self, limit: Optional[int] = None, page: Optional[int] = None, user_id: Optional[str] = None
     ) -> Tuple[List[Dict[str, Any]], int]:
         """Get user memories stats.
 
         Args:
             limit (Optional[int]): The maximum number of user stats to return.
             page (Optional[int]): The page number.
+            user_id (Optional[str]): User ID for filtering.
 
         Returns:
             Tuple[List[Dict[str, Any]], int]: A list of dictionaries containing user stats and total count.
@@ -1173,17 +1404,19 @@ class AsyncPostgresDb(AsyncBaseDb):
             table = await self._get_table(table_type="memories")
 
             async with self.async_session_factory() as sess, sess.begin():
-                stmt = (
-                    select(
-                        table.c.user_id,
-                        func.count(table.c.memory_id).label("total_memories"),
-                        func.max(table.c.updated_at).label("last_memory_updated_at"),
-                    )
-                    .where(table.c.user_id.is_not(None))
-                    .group_by(table.c.user_id)
-                    .order_by(func.max(table.c.updated_at).desc())
+                stmt = select(
+                    table.c.user_id,
+                    func.count(table.c.memory_id).label("total_memories"),
+                    func.max(table.c.updated_at).label("last_memory_updated_at"),
                 )
 
+                if user_id is not None:
+                    stmt = stmt.where(table.c.user_id == user_id)
+                else:
+                    stmt = stmt.where(table.c.user_id.is_not(None))
+                stmt = stmt.group_by(table.c.user_id)
+                stmt = stmt.order_by(func.max(table.c.updated_at).desc())
+
                 count_stmt = select(func.count()).select_from(stmt.alias())
                 total_count = await sess.scalar(count_stmt) or 0
 
@@ -1229,38 +1462,55 @@ class AsyncPostgresDb(AsyncBaseDb):
             Exception: If an error occurs during upsert.
         """
         try:
-            table = await self._get_table(table_type="memories")
+            table = await self._get_table(table_type="memories", create_table_if_not_found=True)
 
-
-            if memory.memory_id is None:
-                memory.memory_id = str(uuid4())
+            current_time = int(time.time())
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                    input=
+            # Sanitize string fields to remove null bytes (PostgreSQL doesn't allow them)
+            sanitized_input = sanitize_postgres_string(memory.input)
+            sanitized_feedback = sanitize_postgres_string(memory.feedback)
+            # Sanitize JSONB fields to remove null bytes from nested strings
+            sanitized_memory = sanitize_postgres_strings(memory.memory) if memory.memory else None
+            sanitized_topics = sanitize_postgres_strings(memory.topics) if memory.topics else None
+
+            async with self.async_session_factory() as sess:
+                async with sess.begin():
+                    if memory.memory_id is None:
+                        memory.memory_id = str(uuid4())
+
+                    stmt = postgresql.insert(table).values(
+                        memory_id=memory.memory_id,
+                        memory=sanitized_memory,
+                        input=sanitized_input,
+                        user_id=memory.user_id,
                         agent_id=memory.agent_id,
                         team_id=memory.team_id,
-
-
-
+                        topics=sanitized_topics,
+                        feedback=sanitized_feedback,
+                        created_at=memory.created_at,
+                        updated_at=memory.updated_at
+                        if memory.updated_at is not None
+                        else (memory.created_at if memory.created_at is not None else current_time),
+                    )
+                    stmt = stmt.on_conflict_do_update(  # type: ignore
+                        index_elements=["memory_id"],
+                        set_=dict(
+                            memory=sanitized_memory,
+                            topics=sanitized_topics,
+                            input=sanitized_input,
+                            agent_id=memory.agent_id,
+                            team_id=memory.team_id,
+                            feedback=sanitized_feedback,
+                            updated_at=current_time,
+                            # Preserve created_at on update - don't overwrite existing value
+                            created_at=table.c.created_at,
+                        ),
+                    ).returning(table)
 
-
-
-
-
+                    result = await sess.execute(stmt)
+                    row = result.fetchone()
+                    if row is None:
+                        return None
 
                 memory_raw = dict(row._mapping)
 
@@ -1364,7 +1614,7 @@ class AsyncPostgresDb(AsyncBaseDb):
             Exception: If an error occurs during metrics calculation.
         """
         try:
-            table = await self._get_table(table_type="metrics")
+            table = await self._get_table(table_type="metrics", create_table_if_not_found=True)
 
             starting_date = await self._get_metrics_calculation_starting_date(table)
 
@@ -1440,7 +1690,7 @@ class AsyncPostgresDb(AsyncBaseDb):
             Exception: If an error occurs during retrieval.
         """
         try:
-            table = await self._get_table(table_type="metrics")
+            table = await self._get_table(table_type="metrics", create_table_if_not_found=True)
 
             async with self.async_session_factory() as sess, sess.begin():
                 stmt = select(table)
@@ -1490,7 +1740,7 @@ class AsyncPostgresDb(AsyncBaseDb):
         Returns:
             Optional[KnowledgeRow]: The knowledge row, or None if it doesn't exist.
         """
-        table = await self._get_table(table_type="knowledge")
+        table = await self._get_table(table_type="knowledge", create_table_if_not_found=True)
 
         try:
             async with self.async_session_factory() as sess, sess.begin():
@@ -1534,8 +1784,7 @@ class AsyncPostgresDb(AsyncBaseDb):
                 stmt = select(table)
 
                 # Apply sorting
-
-                stmt = stmt.order_by(getattr(table.c, sort_by) * (1 if sort_order == "asc" else -1))
+                stmt = apply_sorting(stmt, table, sort_by, sort_order)
 
                 # Get total count before applying limit and pagination
                 count_stmt = select(func.count()).select_from(stmt.alias())
@@ -1565,7 +1814,7 @@ class AsyncPostgresDb(AsyncBaseDb):
             Optional[KnowledgeRow]: The upserted knowledge row, or None if the operation fails.
         """
         try:
-            table = await self._get_table(table_type="knowledge")
+            table = await self._get_table(table_type="knowledge", create_table_if_not_found=True)
             async with self.async_session_factory() as sess, sess.begin():
                 # Get the actual table columns to avoid "unconsumed column names" error
                 table_columns = set(table.columns.keys())
@@ -1592,10 +1841,19 @@ class AsyncPostgresDb(AsyncBaseDb):
             }

             # Build insert and update data only for fields that exist in the table
+            # String fields that need sanitization
+            string_fields = {"name", "description", "type", "status", "status_message", "external_id", "linked_to"}
+
             for model_field, table_column in field_mapping.items():
                 if table_column in table_columns:
                     value = getattr(knowledge_row, model_field, None)
                     if value is not None:
+                        # Sanitize string fields to remove null bytes
+                        if table_column in string_fields and isinstance(value, str):
+                            value = sanitize_postgres_string(value)
+                        # Sanitize metadata dict if present
+                        elif table_column == "metadata" and isinstance(value, dict):
+                            value = sanitize_postgres_strings(value)
                         insert_data[table_column] = value
                         # Don't include ID in update_fields since it's the primary key
                         if table_column != "id":
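`sanitize_postgres_string` and `sanitize_postgres_strings` appear in nearly every write path from here on. Their implementation lives in agno's Postgres utilities and is not shown in this diff; given that PostgreSQL rejects NUL (0x00) bytes in TEXT and JSONB values, they presumably behave roughly like this sketch:

```python
# Guessed behavior, not the actual agno implementation.
from typing import Any


def sanitize_postgres_string_sketch(value: str) -> str:
    # Strip NUL bytes, which PostgreSQL refuses inside text values
    return value.replace("\x00", "")


def sanitize_postgres_strings_sketch(value: Any) -> Any:
    # Recursively sanitize strings nested in dicts and lists
    if isinstance(value, str):
        return sanitize_postgres_string_sketch(value)
    if isinstance(value, dict):
        return {k: sanitize_postgres_strings_sketch(v) for k, v in value.items()}
    if isinstance(value, list):
        return [sanitize_postgres_strings_sketch(v) for v in value]
    return value


assert sanitize_postgres_strings_sketch({"name": "a\x00b", "tags": ["x\x00"]}) == {"name": "ab", "tags": ["x"]}
```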
@@ -1646,12 +1904,26 @@ class AsyncPostgresDb(AsyncBaseDb):
             Exception: If an error occurs during creation.
         """
         try:
-            table = await self._get_table(table_type="evals")
+            table = await self._get_table(table_type="evals", create_table_if_not_found=True)

             async with self.async_session_factory() as sess, sess.begin():
                 current_time = int(time.time())
+                eval_data = eval_run.model_dump()
+                # Sanitize string fields in eval_run
+                if eval_data.get("name"):
+                    eval_data["name"] = sanitize_postgres_string(eval_data["name"])
+                if eval_data.get("evaluated_component_name"):
+                    eval_data["evaluated_component_name"] = sanitize_postgres_string(
+                        eval_data["evaluated_component_name"]
+                    )
+                # Sanitize nested dicts/JSON fields
+                if eval_data.get("eval_data"):
+                    eval_data["eval_data"] = sanitize_postgres_strings(eval_data["eval_data"])
+                if eval_data.get("eval_input"):
+                    eval_data["eval_input"] = sanitize_postgres_strings(eval_data["eval_input"])
+
                 stmt = postgresql.insert(table).values(
-                    {"created_at": current_time, "updated_at": current_time, **eval_run.model_dump()}
+                    {"created_at": current_time, "updated_at": current_time, **eval_data}
                 )
                 await sess.execute(stmt)

@@ -1853,8 +2125,12 @@ class AsyncPostgresDb(AsyncBaseDb):
         try:
             table = await self._get_table(table_type="evals")
             async with self.async_session_factory() as sess, sess.begin():
+                # Sanitize string field to remove null bytes
+                sanitized_name = sanitize_postgres_string(name)
                 stmt = (
-                    table.update()
+                    table.update()
+                    .where(table.c.run_id == eval_run_id)
+                    .values(name=sanitized_name, updated_at=int(time.time()))
                 )
                 await sess.execute(stmt)

@@ -1925,3 +2201,844 @@ class AsyncPostgresDb(AsyncBaseDb):
         for memory in memories:
             await self.upsert_user_memory(memory)
         log_info(f"Migrated {len(memories)} memories to table: {self.memory_table}")
+
+    # --- Traces ---
+    def _get_traces_base_query(self, table: Table, spans_table: Optional[Table] = None):
+        """Build base query for traces with aggregated span counts.
+
+        Args:
+            table: The traces table.
+            spans_table: The spans table (optional).
+
+        Returns:
+            SQLAlchemy select statement with total_spans and error_count calculated dynamically.
+        """
+        from sqlalchemy import case, literal
+
+        if spans_table is not None:
+            # JOIN with spans table to calculate total_spans and error_count
+            return (
+                select(
+                    table,
+                    func.coalesce(func.count(spans_table.c.span_id), 0).label("total_spans"),
+                    func.coalesce(func.sum(case((spans_table.c.status_code == "ERROR", 1), else_=0)), 0).label(
+                        "error_count"
+                    ),
+                )
+                .select_from(table.outerjoin(spans_table, table.c.trace_id == spans_table.c.trace_id))
+                .group_by(table.c.trace_id)
+            )
+        else:
+            # Fallback if spans table doesn't exist
+            return select(table, literal(0).label("total_spans"), literal(0).label("error_count"))
+
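The helper builds one LEFT OUTER JOIN plus GROUP BY statement so span counts come back with each trace in a single round trip. Grouping by `trace_id` alone while selecting every `traces` column is legal in PostgreSQL because `trace_id` is the table's primary key. A self-contained sketch over a hypothetical reduced schema that prints the SQL shape:

```python
from sqlalchemy import Column, MetaData, String, Table, case, func, select
from sqlalchemy.dialects import postgresql

metadata = MetaData()
traces = Table("traces", metadata, Column("trace_id", String, primary_key=True))
spans = Table(
    "spans",
    metadata,
    Column("span_id", String, primary_key=True),
    Column("trace_id", String),
    Column("status_code", String),
)

stmt = (
    select(
        traces,
        func.coalesce(func.count(spans.c.span_id), 0).label("total_spans"),
        func.coalesce(func.sum(case((spans.c.status_code == "ERROR", 1), else_=0)), 0).label("error_count"),
    )
    .select_from(traces.outerjoin(spans, traces.c.trace_id == spans.c.trace_id))
    .group_by(traces.c.trace_id)
)
# Roughly: SELECT traces.*, coalesce(count(...), 0), coalesce(sum(CASE ...), 0)
#          FROM traces LEFT OUTER JOIN spans ON ... GROUP BY traces.trace_id
print(stmt.compile(dialect=postgresql.dialect()))
```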
+    def _get_trace_component_level_expr(self, workflow_id_col, team_id_col, agent_id_col, name_col):
+        """Build a SQL CASE expression that returns the component level for a trace.
+
+        Component levels (higher = more important):
+        - 3: Workflow root (.run or .arun with workflow_id)
+        - 2: Team root (.run or .arun with team_id)
+        - 1: Agent root (.run or .arun with agent_id)
+        - 0: Child span (not a root)
+
+        Args:
+            workflow_id_col: SQL column/expression for workflow_id
+            team_id_col: SQL column/expression for team_id
+            agent_id_col: SQL column/expression for agent_id
+            name_col: SQL column/expression for name
+
+        Returns:
+            SQLAlchemy CASE expression returning the component level as an integer.
+        """
+        is_root_name = or_(name_col.contains(".run"), name_col.contains(".arun"))
+
+        return case(
+            # Workflow root (level 3)
+            (and_(workflow_id_col.isnot(None), is_root_name), 3),
+            # Team root (level 2)
+            (and_(team_id_col.isnot(None), is_root_name), 2),
+            # Agent root (level 1)
+            (and_(agent_id_col.isnot(None), is_root_name), 1),
+            # Child span or unknown (level 0)
+            else_=0,
+        )
+
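Because the expression is evaluated inside SQL (it drives the ON CONFLICT clause of `upsert_trace` below), the priority logic can be hard to read at a glance. The same rules in plain Python, for reference only:

```python
# Illustrative mirror of the CASE expression; not part of agno.
from typing import Optional


def component_level(workflow_id: Optional[str], team_id: Optional[str], agent_id: Optional[str], name: str) -> int:
    is_root = ".run" in name or ".arun" in name
    if workflow_id is not None and is_root:
        return 3  # workflow root
    if team_id is not None and is_root:
        return 2  # team root
    if agent_id is not None and is_root:
        return 1  # agent root
    return 0  # child span or unknown


assert component_level("wf-1", None, "ag-1", "Workflow.arun") == 3
assert component_level(None, None, "ag-1", "Agent.run") == 1
assert component_level(None, None, "ag-1", "OpenAIChat.invoke") == 0
```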
+    async def upsert_trace(self, trace: "Trace") -> None:
+        """Create or update a single trace record in the database.
+
+        Uses INSERT ... ON CONFLICT DO UPDATE (upsert) to handle concurrent inserts
+        atomically and avoid race conditions.
+
+        Args:
+            trace: The Trace object to store (one per trace_id).
+        """
+        try:
+            table = await self._get_table(table_type="traces", create_table_if_not_found=True)
+
+            trace_dict = trace.to_dict()
+            trace_dict.pop("total_spans", None)
+            trace_dict.pop("error_count", None)
+            # Sanitize string fields and nested JSON structures
+            if trace_dict.get("name"):
+                trace_dict["name"] = sanitize_postgres_string(trace_dict["name"])
+            if trace_dict.get("status"):
+                trace_dict["status"] = sanitize_postgres_string(trace_dict["status"])
+            # Sanitize any nested dict/JSON fields
+            trace_dict = cast(Dict[str, Any], sanitize_postgres_strings(trace_dict))
+
+            async with self.async_session_factory() as sess, sess.begin():
+                # Use upsert to handle concurrent inserts atomically
+                # On conflict, update fields while preserving existing non-null context values
+                # and keeping the earliest start_time
+                insert_stmt = postgresql.insert(table).values(trace_dict)
+
+                # Build component level expressions for comparing trace priority
+                new_level = self._get_trace_component_level_expr(
+                    insert_stmt.excluded.workflow_id,
+                    insert_stmt.excluded.team_id,
+                    insert_stmt.excluded.agent_id,
+                    insert_stmt.excluded.name,
+                )
+                existing_level = self._get_trace_component_level_expr(
+                    table.c.workflow_id,
+                    table.c.team_id,
+                    table.c.agent_id,
+                    table.c.name,
+                )
+
+                # Build the ON CONFLICT DO UPDATE clause
+                # Use LEAST for start_time, GREATEST for end_time to capture full trace duration
+                # Use COALESCE to preserve existing non-null context values
+                upsert_stmt = insert_stmt.on_conflict_do_update(
+                    index_elements=["trace_id"],
+                    set_={
+                        "end_time": func.greatest(table.c.end_time, insert_stmt.excluded.end_time),
+                        "start_time": func.least(table.c.start_time, insert_stmt.excluded.start_time),
+                        "duration_ms": func.extract(
+                            "epoch",
+                            func.cast(
+                                func.greatest(table.c.end_time, insert_stmt.excluded.end_time),
+                                TIMESTAMP(timezone=True),
+                            )
+                            - func.cast(
+                                func.least(table.c.start_time, insert_stmt.excluded.start_time),
+                                TIMESTAMP(timezone=True),
+                            ),
+                        )
+                        * 1000,
+                        "status": insert_stmt.excluded.status,
+                        # Update name only if new trace is from a higher-level component
+                        # Priority: workflow (3) > team (2) > agent (1) > child spans (0)
+                        "name": case(
+                            (new_level > existing_level, insert_stmt.excluded.name),
+                            else_=table.c.name,
+                        ),
+                        # Preserve existing non-null context values using COALESCE
+                        "run_id": func.coalesce(insert_stmt.excluded.run_id, table.c.run_id),
+                        "session_id": func.coalesce(insert_stmt.excluded.session_id, table.c.session_id),
+                        "user_id": func.coalesce(insert_stmt.excluded.user_id, table.c.user_id),
+                        "agent_id": func.coalesce(insert_stmt.excluded.agent_id, table.c.agent_id),
+                        "team_id": func.coalesce(insert_stmt.excluded.team_id, table.c.team_id),
+                        "workflow_id": func.coalesce(insert_stmt.excluded.workflow_id, table.c.workflow_id),
+                    },
+                )
+                await sess.execute(upsert_stmt)
+
+        except Exception as e:
+            log_error(f"Error creating trace: {e}")
+            # Don't raise - tracing should not break the main application flow
+
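Before the read-side methods, a note on the `duration_ms` expression just added: it merges concurrent writers by recomputing the duration from the widest observed window, GREATEST of the end times minus LEAST of the start times, extracted as epoch seconds and scaled to milliseconds. The casts to TIMESTAMP suggest the columns hold ISO-8601 strings (consistent with the `.isoformat()` comparisons in the read methods below); that inference aside, the arithmetic is just:

```python
from datetime import datetime

existing_start = datetime.fromisoformat("2025-01-01T00:00:00+00:00")
existing_end = datetime.fromisoformat("2025-01-01T00:00:02+00:00")
incoming_start = datetime.fromisoformat("2025-01-01T00:00:01+00:00")
incoming_end = datetime.fromisoformat("2025-01-01T00:00:05+00:00")

start = min(existing_start, incoming_start)  # LEAST(start_time, excluded.start_time)
end = max(existing_end, incoming_end)  # GREATEST(end_time, excluded.end_time)
duration_ms = (end - start).total_seconds() * 1000  # EXTRACT(epoch FROM ...) * 1000
assert duration_ms == 5000.0
```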
+    async def get_trace(
+        self,
+        trace_id: Optional[str] = None,
+        run_id: Optional[str] = None,
+    ):
+        """Get a single trace by trace_id or other filters.
+
+        Args:
+            trace_id: The unique trace identifier.
+            run_id: Filter by run ID (returns first match).
+
+        Returns:
+            Optional[Trace]: The trace if found, None otherwise.
+
+        Note:
+            If multiple filters are provided, trace_id takes precedence.
+            For other filters, the most recent trace is returned.
+        """
+        try:
+            from agno.tracing.schemas import Trace
+
+            table = await self._get_table(table_type="traces")
+
+            # Get spans table for JOIN
+            spans_table = await self._get_table(table_type="spans")
+
+            async with self.async_session_factory() as sess:
+                # Build query with aggregated span counts
+                stmt = self._get_traces_base_query(table, spans_table)
+
+                if trace_id:
+                    stmt = stmt.where(table.c.trace_id == trace_id)
+                elif run_id:
+                    stmt = stmt.where(table.c.run_id == run_id)
+                else:
+                    log_debug("get_trace called without any filter parameters")
+                    return None
+
+                # Order by most recent and get first result
+                stmt = stmt.order_by(table.c.start_time.desc()).limit(1)
+                result = await sess.execute(stmt)
+                row = result.fetchone()
+
+                if row:
+                    return Trace.from_dict(dict(row._mapping))
+                return None
+
+        except Exception as e:
+            log_error(f"Error getting trace: {e}")
+            return None
+
+    async def get_traces(
+        self,
+        run_id: Optional[str] = None,
+        session_id: Optional[str] = None,
+        user_id: Optional[str] = None,
+        agent_id: Optional[str] = None,
+        team_id: Optional[str] = None,
+        workflow_id: Optional[str] = None,
+        status: Optional[str] = None,
+        start_time: Optional[datetime] = None,
+        end_time: Optional[datetime] = None,
+        limit: Optional[int] = 20,
+        page: Optional[int] = 1,
+    ) -> tuple[List, int]:
+        """Get traces matching the provided filters with pagination.
+
+        Args:
+            run_id: Filter by run ID.
+            session_id: Filter by session ID.
+            user_id: Filter by user ID.
+            agent_id: Filter by agent ID.
+            team_id: Filter by team ID.
+            workflow_id: Filter by workflow ID.
+            status: Filter by status (OK, ERROR, UNSET).
+            start_time: Filter traces starting after this datetime.
+            end_time: Filter traces ending before this datetime.
+            limit: Maximum number of traces to return per page.
+            page: Page number (1-indexed).
+
+        Returns:
+            tuple[List[Trace], int]: Tuple of (list of matching traces, total count).
+        """
+        try:
+            from agno.tracing.schemas import Trace
+
+            table = await self._get_table(table_type="traces")
+
+            # Get spans table for JOIN
+            spans_table = await self._get_table(table_type="spans")
+
+            async with self.async_session_factory() as sess:
+                # Build base query with aggregated span counts
+                base_stmt = self._get_traces_base_query(table, spans_table)
+
+                # Apply filters
+                if run_id:
+                    base_stmt = base_stmt.where(table.c.run_id == run_id)
+                if session_id:
+                    base_stmt = base_stmt.where(table.c.session_id == session_id)
+                if user_id:
+                    base_stmt = base_stmt.where(table.c.user_id == user_id)
+                if agent_id:
+                    base_stmt = base_stmt.where(table.c.agent_id == agent_id)
+                if team_id:
+                    base_stmt = base_stmt.where(table.c.team_id == team_id)
+                if workflow_id:
+                    base_stmt = base_stmt.where(table.c.workflow_id == workflow_id)
+                if status:
+                    base_stmt = base_stmt.where(table.c.status == status)
+                if start_time:
+                    # Convert datetime to ISO string for comparison
+                    base_stmt = base_stmt.where(table.c.start_time >= start_time.isoformat())
+                if end_time:
+                    # Convert datetime to ISO string for comparison
+                    base_stmt = base_stmt.where(table.c.end_time <= end_time.isoformat())
+
+                # Get total count
+                count_stmt = select(func.count()).select_from(base_stmt.alias())
+                total_count = await sess.scalar(count_stmt) or 0
+                log_debug(f"Total matching traces: {total_count}")
+
+                # Apply pagination
+                offset = (page - 1) * limit if page and limit else 0
+                paginated_stmt = base_stmt.order_by(table.c.start_time.desc()).limit(limit).offset(offset)
+
+                result = await sess.execute(paginated_stmt)
+                results = result.fetchall()
+                log_debug(f"Returning page {page} with {len(results)} traces")
+
+                traces = [Trace.from_dict(dict(row._mapping)) for row in results]
+                return traces, total_count
+
+        except Exception as e:
+            log_error(f"Error getting traces: {e}")
+            return [], 0
+
+    async def get_trace_stats(
+        self,
+        user_id: Optional[str] = None,
+        agent_id: Optional[str] = None,
+        team_id: Optional[str] = None,
+        workflow_id: Optional[str] = None,
+        start_time: Optional[datetime] = None,
+        end_time: Optional[datetime] = None,
+        limit: Optional[int] = 20,
+        page: Optional[int] = 1,
+    ) -> tuple[List[Dict[str, Any]], int]:
+        """Get trace statistics grouped by session.
+
+        Args:
+            user_id: Filter by user ID.
+            agent_id: Filter by agent ID.
+            team_id: Filter by team ID.
+            workflow_id: Filter by workflow ID.
+            start_time: Filter sessions with traces created after this datetime.
+            end_time: Filter sessions with traces created before this datetime.
+            limit: Maximum number of sessions to return per page.
+            page: Page number (1-indexed).
+
+        Returns:
+            tuple[List[Dict], int]: Tuple of (list of session stats dicts, total count).
+            Each dict contains: session_id, user_id, agent_id, team_id, total_traces,
+            workflow_id, first_trace_at, last_trace_at.
+        """
+        try:
+            table = await self._get_table(table_type="traces")
+
+            async with self.async_session_factory() as sess:
+                # Build base query grouped by session_id
+                base_stmt = (
+                    select(
+                        table.c.session_id,
+                        table.c.user_id,
+                        table.c.agent_id,
+                        table.c.team_id,
+                        table.c.workflow_id,
+                        func.count(table.c.trace_id).label("total_traces"),
+                        func.min(table.c.created_at).label("first_trace_at"),
+                        func.max(table.c.created_at).label("last_trace_at"),
+                    )
+                    .where(table.c.session_id.isnot(None))  # Only sessions with session_id
+                    .group_by(
+                        table.c.session_id, table.c.user_id, table.c.agent_id, table.c.team_id, table.c.workflow_id
+                    )
+                )
+
+                # Apply filters
+                if user_id:
+                    base_stmt = base_stmt.where(table.c.user_id == user_id)
+                if workflow_id:
+                    base_stmt = base_stmt.where(table.c.workflow_id == workflow_id)
+                if team_id:
+                    base_stmt = base_stmt.where(table.c.team_id == team_id)
+                if agent_id:
+                    base_stmt = base_stmt.where(table.c.agent_id == agent_id)
+                if start_time:
+                    # Convert datetime to ISO string for comparison
+                    base_stmt = base_stmt.where(table.c.created_at >= start_time.isoformat())
+                if end_time:
+                    # Convert datetime to ISO string for comparison
+                    base_stmt = base_stmt.where(table.c.created_at <= end_time.isoformat())
+
+                # Get total count of sessions
+                count_stmt = select(func.count()).select_from(base_stmt.alias())
+                total_count = await sess.scalar(count_stmt) or 0
+
+                # Apply pagination and ordering
+                offset = (page - 1) * limit if page and limit else 0
+                paginated_stmt = base_stmt.order_by(func.max(table.c.created_at).desc()).limit(limit).offset(offset)
+
+                result = await sess.execute(paginated_stmt)
+                results = result.fetchall()
+
+                # Convert to list of dicts with datetime objects
+                stats_list = []
+                for row in results:
+                    # Convert ISO strings to datetime objects
+                    first_trace_at_str = row.first_trace_at
+                    last_trace_at_str = row.last_trace_at
+
+                    # Parse ISO format strings to datetime objects
+                    first_trace_at = datetime.fromisoformat(first_trace_at_str.replace("Z", "+00:00"))
+                    last_trace_at = datetime.fromisoformat(last_trace_at_str.replace("Z", "+00:00"))
+
+                    stats_list.append(
+                        {
+                            "session_id": row.session_id,
+                            "user_id": row.user_id,
+                            "agent_id": row.agent_id,
+                            "team_id": row.team_id,
+                            "workflow_id": row.workflow_id,
+                            "total_traces": row.total_traces,
+                            "first_trace_at": first_trace_at,
+                            "last_trace_at": last_trace_at,
+                        }
+                    )
+
+                return stats_list, total_count
+
+        except Exception as e:
+            log_error(f"Error getting trace stats: {e}")
+            return [], 0
+
|
+
# --- Spans ---
|
|
2596
|
+
async def create_span(self, span: "Span") -> None:
|
|
2597
|
+
"""Create a single span in the database.
|
|
2598
|
+
|
|
2599
|
+
Args:
|
|
2600
|
+
span: The Span object to store.
|
|
2601
|
+
"""
|
|
2602
|
+
try:
|
|
2603
|
+
table = await self._get_table(table_type="spans", create_table_if_not_found=True)
|
|
2604
|
+
|
|
2605
|
+
async with self.async_session_factory() as sess, sess.begin():
|
|
2606
|
+
span_dict = span.to_dict()
|
|
2607
|
+
# Sanitize string fields and nested JSON structures
|
|
2608
|
+
if span_dict.get("name"):
|
|
2609
|
+
span_dict["name"] = sanitize_postgres_string(span_dict["name"])
|
|
2610
|
+
if span_dict.get("status_code"):
|
|
2611
|
+
span_dict["status_code"] = sanitize_postgres_string(span_dict["status_code"])
|
|
2612
|
+
# Sanitize any nested dict/JSON fields
|
|
2613
|
+
span_dict = cast(Dict[str, Any], sanitize_postgres_strings(span_dict))
|
|
2614
|
+
stmt = postgresql.insert(table).values(span_dict)
|
|
2615
|
+
await sess.execute(stmt)
|
|
2616
|
+
|
|
2617
|
+
except Exception as e:
|
|
2618
|
+
log_error(f"Error creating span: {e}")
|
|
2619
|
+
|
|
2620
|
+
async def create_spans(self, spans: List) -> None:
|
|
2621
|
+
"""Create multiple spans in the database as a batch.
|
|
2622
|
+
|
|
2623
|
+
Args:
|
|
2624
|
+
spans: List of Span objects to store.
|
|
2625
|
+
"""
|
|
2626
|
+
if not spans:
|
|
2627
|
+
return
|
|
2628
|
+
|
|
2629
|
+
try:
|
|
2630
|
+
table = await self._get_table(table_type="spans", create_table_if_not_found=True)
|
|
2631
|
+
|
|
2632
|
+
async with self.async_session_factory() as sess, sess.begin():
|
|
2633
|
+
for span in spans:
|
|
2634
|
+
span_dict = span.to_dict()
|
|
2635
|
+
# Sanitize string fields and nested JSON structures
|
|
2636
|
+
if span_dict.get("name"):
|
|
2637
|
+
span_dict["name"] = sanitize_postgres_string(span_dict["name"])
|
|
2638
|
+
if span_dict.get("status_code"):
|
|
2639
|
+
span_dict["status_code"] = sanitize_postgres_string(span_dict["status_code"])
|
|
2640
|
+
# Sanitize any nested dict/JSON fields
|
|
2641
|
+
span_dict = sanitize_postgres_strings(span_dict)
|
|
2642
|
+
stmt = postgresql.insert(table).values(span_dict)
|
|
2643
|
+
await sess.execute(stmt)
|
|
2644
|
+
|
|
2645
|
+
except Exception as e:
|
|
2646
|
+
log_error(f"Error creating spans batch: {e}")
|
|
2647
|
+
|
|
2648
|
+
async def get_span(self, span_id: str):
|
|
2649
|
+
"""Get a single span by its span_id.
|
|
2650
|
+
|
|
2651
|
+
Args:
|
|
2652
|
+
span_id: The unique span identifier.
|
|
2653
|
+
|
|
2654
|
+
Returns:
|
|
2655
|
+
Optional[Span]: The span if found, None otherwise.
|
|
2656
|
+
"""
|
|
2657
|
+
try:
|
|
2658
|
+
from agno.tracing.schemas import Span
|
|
2659
|
+
|
|
2660
|
+
table = await self._get_table(table_type="spans")
|
|
2661
|
+
|
|
2662
|
+
async with self.async_session_factory() as sess:
|
|
2663
|
+
stmt = select(table).where(table.c.span_id == span_id)
|
|
2664
|
+
result = await sess.execute(stmt)
|
|
2665
|
+
row = result.fetchone()
|
|
2666
|
+
if row:
|
|
2667
|
+
return Span.from_dict(dict(row._mapping))
|
|
2668
|
+
return None
|
|
2669
|
+
|
|
2670
|
+
except Exception as e:
|
|
2671
|
+
log_error(f"Error getting span: {e}")
|
|
2672
|
+
return None
|
|
2673
|
+
|
|
2674
|
+
async def get_spans(
|
|
2675
|
+
self,
|
|
2676
|
+
trace_id: Optional[str] = None,
|
|
2677
|
+
parent_span_id: Optional[str] = None,
|
|
2678
|
+
limit: Optional[int] = 1000,
|
|
2679
|
+
) -> List:
|
|
2680
|
+
"""Get spans matching the provided filters.
|
|
2681
|
+
|
|
2682
|
+
Args:
|
|
2683
|
+
trace_id: Filter by trace ID.
|
|
2684
|
+
parent_span_id: Filter by parent span ID.
|
|
2685
|
+
limit: Maximum number of spans to return.
|
|
2686
|
+
|
|
2687
|
+
Returns:
|
|
2688
|
+
List[Span]: List of matching spans.
|
|
2689
|
+
"""
|
|
2690
|
+
try:
|
|
2691
|
+
from agno.tracing.schemas import Span
|
|
2692
|
+
|
|
2693
|
+
table = await self._get_table(table_type="spans")
|
|
2694
|
+
|
|
2695
|
+
async with self.async_session_factory() as sess:
|
|
2696
|
+
stmt = select(table)
|
|
2697
|
+
|
|
2698
|
+
# Apply filters
|
|
2699
|
+
if trace_id:
|
|
2700
|
+
stmt = stmt.where(table.c.trace_id == trace_id)
|
|
2701
|
+
if parent_span_id:
|
|
2702
|
+
stmt = stmt.where(table.c.parent_span_id == parent_span_id)
|
|
2703
|
+
|
|
2704
|
+
if limit:
|
|
2705
|
+
stmt = stmt.limit(limit)
|
|
2706
|
+
|
|
2707
|
+
result = await sess.execute(stmt)
|
|
2708
|
+
results = result.fetchall()
|
|
2709
|
+
return [Span.from_dict(dict(row._mapping)) for row in results]
|
|
2710
|
+
|
|
2711
|
+
except Exception as e:
|
|
2712
|
+
log_error(f"Error getting spans: {e}")
|
|
2713
|
+
return []
|
|
2714
|
+
|
|
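`get_spans` returns a flat, unordered list, so callers reconstruct the hierarchy themselves. A usage sketch, assuming `db` is an `AsyncPostgresDb` instance and `Span` exposes `span_id`/`parent_span_id` as the code above uses:

```python
from collections import defaultdict


async def build_span_tree(db, trace_id: str) -> dict:
    # Group spans under their parent; root spans land under the None key.
    spans = await db.get_spans(trace_id=trace_id)
    children = defaultdict(list)
    for span in spans:
        children[span.parent_span_id].append(span)
    return children
```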
+    # -- Learning methods --
+    async def get_learning(
+        self,
+        learning_type: str,
+        user_id: Optional[str] = None,
+        agent_id: Optional[str] = None,
+        team_id: Optional[str] = None,
+        session_id: Optional[str] = None,
+        namespace: Optional[str] = None,
+        entity_id: Optional[str] = None,
+        entity_type: Optional[str] = None,
+    ) -> Optional[Dict[str, Any]]:
+        """Async retrieve a learning record.
+
+        Args:
+            learning_type: Type of learning ('user_profile', 'session_context', etc.)
+            user_id: Filter by user ID.
+            agent_id: Filter by agent ID.
+            team_id: Filter by team ID.
+            session_id: Filter by session ID.
+            namespace: Filter by namespace ('user', 'global', or custom).
+            entity_id: Filter by entity ID (for entity-specific learnings).
+            entity_type: Filter by entity type ('person', 'company', etc.).
+
+        Returns:
+            Dict with 'content' key containing the learning data, or None.
+        """
+        try:
+            table = await self._get_table(table_type="learnings")
+            if table is None:
+                return None
+
+            async with self.async_session_factory() as sess:
+                stmt = select(table).where(table.c.learning_type == learning_type)
+
+                if user_id is not None:
+                    stmt = stmt.where(table.c.user_id == user_id)
+                if agent_id is not None:
+                    stmt = stmt.where(table.c.agent_id == agent_id)
+                if team_id is not None:
+                    stmt = stmt.where(table.c.team_id == team_id)
+                if session_id is not None:
+                    stmt = stmt.where(table.c.session_id == session_id)
+                if namespace is not None:
+                    stmt = stmt.where(table.c.namespace == namespace)
+                if entity_id is not None:
+                    stmt = stmt.where(table.c.entity_id == entity_id)
+                if entity_type is not None:
+                    stmt = stmt.where(table.c.entity_type == entity_type)
+
+                result = await sess.execute(stmt)
+                row = result.fetchone()
+                if row is None:
+                    return None
+
+                row_dict = dict(row._mapping)
+                return {"content": row_dict.get("content")}
+
+        except Exception as e:
+            log_debug(f"Error retrieving learning: {e}")
+            return None
+
+    async def upsert_learning(
+        self,
+        id: str,
+        learning_type: str,
+        content: Dict[str, Any],
+        user_id: Optional[str] = None,
+        agent_id: Optional[str] = None,
+        team_id: Optional[str] = None,
+        session_id: Optional[str] = None,
+        namespace: Optional[str] = None,
+        entity_id: Optional[str] = None,
+        entity_type: Optional[str] = None,
+        metadata: Optional[Dict[str, Any]] = None,
+    ) -> None:
+        """Async insert or update a learning record.
+
+        Args:
+            id: Unique identifier for the learning.
+            learning_type: Type of learning ('user_profile', 'session_context', etc.)
+            content: The learning content as a dict.
+            user_id: Associated user ID.
+            agent_id: Associated agent ID.
+            team_id: Associated team ID.
+            session_id: Associated session ID.
+            namespace: Namespace for scoping ('user', 'global', or custom).
+            entity_id: Associated entity ID (for entity-specific learnings).
+            entity_type: Entity type ('person', 'company', etc.).
+            metadata: Optional metadata.
+        """
+        try:
+            table = await self._get_table(table_type="learnings", create_table_if_not_found=True)
+            if table is None:
+                return
+
+            current_time = int(time.time())
+
+            async with self.async_session_factory() as sess, sess.begin():
+                stmt = postgresql.insert(table).values(
+                    learning_id=id,
+                    learning_type=learning_type,
+                    namespace=namespace,
+                    user_id=user_id,
+                    agent_id=agent_id,
+                    team_id=team_id,
+                    session_id=session_id,
+                    entity_id=entity_id,
+                    entity_type=entity_type,
+                    content=content,
+                    metadata=metadata,
+                    created_at=current_time,
+                    updated_at=current_time,
+                )
+                stmt = stmt.on_conflict_do_update(
+                    index_elements=["learning_id"],
+                    set_=dict(
+                        content=content,
+                        metadata=metadata,
+                        updated_at=current_time,
+                    ),
+                )
+                await sess.execute(stmt)
+
+            log_debug(f"Upserted learning: {id}")
+
+        except Exception as e:
+            log_debug(f"Error upserting learning: {e}")
+
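Taken together, the two methods above give an idempotent key-value store keyed by `learning_id`: re-upserting the same `id` refreshes `content`, `metadata`, and `updated_at` via ON CONFLICT, and `get_learning` returns only the `content` payload. A usage sketch (`db` assumed to be an `AsyncPostgresDb`; the id scheme and values are made up):

```python
async def remember_user_profile(db, user_id: str) -> None:
    await db.upsert_learning(
        id=f"user_profile:{user_id}",  # hypothetical id scheme
        learning_type="user_profile",
        content={"preferred_language": "en"},
        user_id=user_id,
        namespace="user",
    )
    profile = await db.get_learning(learning_type="user_profile", user_id=user_id, namespace="user")
    assert profile == {"content": {"preferred_language": "en"}}
```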
+    async def delete_learning(self, id: str) -> bool:
+        """Async delete a learning record.
+
+        Args:
+            id: The learning ID to delete.
+
+        Returns:
+            True if deleted, False otherwise.
+        """
+        try:
+            table = await self._get_table(table_type="learnings")
+            if table is None:
+                return False
+
+            async with self.async_session_factory() as sess, sess.begin():
+                stmt = table.delete().where(table.c.learning_id == id)
+                result = await sess.execute(stmt)
+                return getattr(result, "rowcount", 0) > 0
+
+        except Exception as e:
+            log_debug(f"Error deleting learning: {e}")
+            return False
+
+    async def get_learnings(
+        self,
+        learning_type: Optional[str] = None,
+        user_id: Optional[str] = None,
+        agent_id: Optional[str] = None,
+        team_id: Optional[str] = None,
+        session_id: Optional[str] = None,
+        namespace: Optional[str] = None,
+        entity_id: Optional[str] = None,
+        entity_type: Optional[str] = None,
+        limit: Optional[int] = None,
+    ) -> List[Dict[str, Any]]:
+        """Async get multiple learning records.
+
+        Args:
+            learning_type: Filter by learning type.
+            user_id: Filter by user ID.
+            agent_id: Filter by agent ID.
+            team_id: Filter by team ID.
+            session_id: Filter by session ID.
+            namespace: Filter by namespace ('user', 'global', or custom).
+            entity_id: Filter by entity ID (for entity-specific learnings).
+            entity_type: Filter by entity type ('person', 'company', etc.).
+            limit: Maximum number of records to return.
+
+        Returns:
+            List of learning records.
+        """
+        try:
+            table = await self._get_table(table_type="learnings")
+            if table is None:
+                return []
+
+            async with self.async_session_factory() as sess:
+                stmt = select(table)
+
+                if learning_type is not None:
+                    stmt = stmt.where(table.c.learning_type == learning_type)
+                if user_id is not None:
+                    stmt = stmt.where(table.c.user_id == user_id)
+                if agent_id is not None:
+                    stmt = stmt.where(table.c.agent_id == agent_id)
+                if team_id is not None:
+                    stmt = stmt.where(table.c.team_id == team_id)
+                if session_id is not None:
+                    stmt = stmt.where(table.c.session_id == session_id)
+                if namespace is not None:
+                    stmt = stmt.where(table.c.namespace == namespace)
+                if entity_id is not None:
+                    stmt = stmt.where(table.c.entity_id == entity_id)
+                if entity_type is not None:
+                    stmt = stmt.where(table.c.entity_type == entity_type)
+
+                stmt = stmt.order_by(table.c.updated_at.desc())
+
+                if limit is not None:
+                    stmt = stmt.limit(limit)
+
+                result = await sess.execute(stmt)
+                rows = result.fetchall()
+                return [dict(row._mapping) for row in rows]
+
+        except Exception as e:
+            log_debug(f"Error getting learnings: {e}")
+            return []
+
+    # --- Components (Not yet supported for async) ---
+    def get_component(
+        self,
+        component_id: str,
+        component_type: Optional[ComponentType] = None,
+    ) -> Optional[Dict[str, Any]]:
+        raise NotImplementedError("Component methods not yet supported for async databases")
+
+    def upsert_component(
+        self,
+        component_id: str,
+        component_type: Optional[ComponentType] = None,
+        name: Optional[str] = None,
+        description: Optional[str] = None,
+        metadata: Optional[Dict[str, Any]] = None,
+    ) -> Dict[str, Any]:
+        raise NotImplementedError("Component methods not yet supported for async databases")
+
+    def delete_component(
+        self,
+        component_id: str,
+        hard_delete: bool = False,
+    ) -> bool:
+        raise NotImplementedError("Component methods not yet supported for async databases")
+
+    def list_components(
+        self,
+        component_type: Optional[ComponentType] = None,
+        include_deleted: bool = False,
+        limit: int = 20,
+        offset: int = 0,
+    ) -> Tuple[List[Dict[str, Any]], int]:
+        raise NotImplementedError("Component methods not yet supported for async databases")
+
+    def create_component_with_config(
+        self,
+        component_id: str,
+        component_type: ComponentType,
+        name: Optional[str],
+        config: Dict[str, Any],
+        description: Optional[str] = None,
+        metadata: Optional[Dict[str, Any]] = None,
+        label: Optional[str] = None,
+        stage: str = "draft",
+        notes: Optional[str] = None,
+        links: Optional[List[Dict[str, Any]]] = None,
+    ) -> Tuple[Dict[str, Any], Dict[str, Any]]:
+        raise NotImplementedError("Component methods not yet supported for async databases")
+
+    def get_config(
+        self,
+        component_id: str,
+        version: Optional[int] = None,
+        label: Optional[str] = None,
+    ) -> Optional[Dict[str, Any]]:
+        raise NotImplementedError("Component methods not yet supported for async databases")
+
+    def upsert_config(
+        self,
+        component_id: str,
+        config: Optional[Dict[str, Any]] = None,
+        version: Optional[int] = None,
+        label: Optional[str] = None,
+        stage: Optional[str] = None,
+        notes: Optional[str] = None,
+        links: Optional[List[Dict[str, Any]]] = None,
+    ) -> Dict[str, Any]:
+        raise NotImplementedError("Component methods not yet supported for async databases")
+
+    def delete_config(
+        self,
+        component_id: str,
+        version: int,
+    ) -> bool:
+        raise NotImplementedError("Component methods not yet supported for async databases")
+
+    def list_configs(
+        self,
+        component_id: str,
+        include_config: bool = False,
+    ) -> List[Dict[str, Any]]:
+        raise NotImplementedError("Component methods not yet supported for async databases")
+
+    def set_current_version(
+        self,
+        component_id: str,
+        version: int,
+    ) -> bool:
+        raise NotImplementedError("Component methods not yet supported for async databases")
+
+    def get_links(
+        self,
+        component_id: str,
+        version: int,
+        link_kind: Optional[str] = None,
+    ) -> List[Dict[str, Any]]:
+        raise NotImplementedError("Component methods not yet supported for async databases")
+
+    def get_dependents(
+        self,
+        component_id: str,
+        version: Optional[int] = None,
+    ) -> List[Dict[str, Any]]:
+        raise NotImplementedError("Component methods not yet supported for async databases")
+
+    def load_component_graph(
+        self,
+        component_id: str,
+        version: Optional[int] = None,
+        label: Optional[str] = None,
+    ) -> Optional[Dict[str, Any]]:
+        raise NotImplementedError("Component methods not yet supported for async databases")