agno 2.2.13__py3-none-any.whl → 2.4.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/__init__.py +6 -0
- agno/agent/agent.py +5252 -3145
- agno/agent/remote.py +525 -0
- agno/api/api.py +2 -0
- agno/client/__init__.py +3 -0
- agno/client/a2a/__init__.py +10 -0
- agno/client/a2a/client.py +554 -0
- agno/client/a2a/schemas.py +112 -0
- agno/client/a2a/utils.py +369 -0
- agno/client/os.py +2669 -0
- agno/compression/__init__.py +3 -0
- agno/compression/manager.py +247 -0
- agno/culture/manager.py +2 -2
- agno/db/base.py +927 -6
- agno/db/dynamo/dynamo.py +788 -2
- agno/db/dynamo/schemas.py +128 -0
- agno/db/dynamo/utils.py +26 -3
- agno/db/firestore/firestore.py +674 -50
- agno/db/firestore/schemas.py +41 -0
- agno/db/firestore/utils.py +25 -10
- agno/db/gcs_json/gcs_json_db.py +506 -3
- agno/db/gcs_json/utils.py +14 -2
- agno/db/in_memory/in_memory_db.py +203 -4
- agno/db/in_memory/utils.py +14 -2
- agno/db/json/json_db.py +498 -2
- agno/db/json/utils.py +14 -2
- agno/db/migrations/manager.py +199 -0
- agno/db/migrations/utils.py +19 -0
- agno/db/migrations/v1_to_v2.py +54 -16
- agno/db/migrations/versions/__init__.py +0 -0
- agno/db/migrations/versions/v2_3_0.py +977 -0
- agno/db/mongo/async_mongo.py +1013 -39
- agno/db/mongo/mongo.py +684 -4
- agno/db/mongo/schemas.py +48 -0
- agno/db/mongo/utils.py +17 -0
- agno/db/mysql/__init__.py +2 -1
- agno/db/mysql/async_mysql.py +2958 -0
- agno/db/mysql/mysql.py +722 -53
- agno/db/mysql/schemas.py +77 -11
- agno/db/mysql/utils.py +151 -8
- agno/db/postgres/async_postgres.py +1254 -137
- agno/db/postgres/postgres.py +2316 -93
- agno/db/postgres/schemas.py +153 -21
- agno/db/postgres/utils.py +22 -7
- agno/db/redis/redis.py +531 -3
- agno/db/redis/schemas.py +36 -0
- agno/db/redis/utils.py +31 -15
- agno/db/schemas/evals.py +1 -0
- agno/db/schemas/memory.py +20 -9
- agno/db/singlestore/schemas.py +70 -1
- agno/db/singlestore/singlestore.py +737 -74
- agno/db/singlestore/utils.py +13 -3
- agno/db/sqlite/async_sqlite.py +1069 -89
- agno/db/sqlite/schemas.py +133 -1
- agno/db/sqlite/sqlite.py +2203 -165
- agno/db/sqlite/utils.py +21 -11
- agno/db/surrealdb/models.py +25 -0
- agno/db/surrealdb/surrealdb.py +603 -1
- agno/db/utils.py +60 -0
- agno/eval/__init__.py +26 -3
- agno/eval/accuracy.py +25 -12
- agno/eval/agent_as_judge.py +871 -0
- agno/eval/base.py +29 -0
- agno/eval/performance.py +10 -4
- agno/eval/reliability.py +22 -13
- agno/eval/utils.py +2 -1
- agno/exceptions.py +42 -0
- agno/hooks/__init__.py +3 -0
- agno/hooks/decorator.py +164 -0
- agno/integrations/discord/client.py +13 -2
- agno/knowledge/__init__.py +4 -0
- agno/knowledge/chunking/code.py +90 -0
- agno/knowledge/chunking/document.py +65 -4
- agno/knowledge/chunking/fixed.py +4 -1
- agno/knowledge/chunking/markdown.py +102 -11
- agno/knowledge/chunking/recursive.py +2 -2
- agno/knowledge/chunking/semantic.py +130 -48
- agno/knowledge/chunking/strategy.py +18 -0
- agno/knowledge/embedder/azure_openai.py +0 -1
- agno/knowledge/embedder/google.py +1 -1
- agno/knowledge/embedder/mistral.py +1 -1
- agno/knowledge/embedder/nebius.py +1 -1
- agno/knowledge/embedder/openai.py +16 -12
- agno/knowledge/filesystem.py +412 -0
- agno/knowledge/knowledge.py +4261 -1199
- agno/knowledge/protocol.py +134 -0
- agno/knowledge/reader/arxiv_reader.py +3 -2
- agno/knowledge/reader/base.py +9 -7
- agno/knowledge/reader/csv_reader.py +91 -42
- agno/knowledge/reader/docx_reader.py +9 -10
- agno/knowledge/reader/excel_reader.py +225 -0
- agno/knowledge/reader/field_labeled_csv_reader.py +38 -48
- agno/knowledge/reader/firecrawl_reader.py +3 -2
- agno/knowledge/reader/json_reader.py +16 -22
- agno/knowledge/reader/markdown_reader.py +15 -14
- agno/knowledge/reader/pdf_reader.py +33 -28
- agno/knowledge/reader/pptx_reader.py +9 -10
- agno/knowledge/reader/reader_factory.py +135 -1
- agno/knowledge/reader/s3_reader.py +8 -16
- agno/knowledge/reader/tavily_reader.py +3 -3
- agno/knowledge/reader/text_reader.py +15 -14
- agno/knowledge/reader/utils/__init__.py +17 -0
- agno/knowledge/reader/utils/spreadsheet.py +114 -0
- agno/knowledge/reader/web_search_reader.py +8 -65
- agno/knowledge/reader/website_reader.py +16 -13
- agno/knowledge/reader/wikipedia_reader.py +36 -3
- agno/knowledge/reader/youtube_reader.py +3 -2
- agno/knowledge/remote_content/__init__.py +33 -0
- agno/knowledge/remote_content/config.py +266 -0
- agno/knowledge/remote_content/remote_content.py +105 -17
- agno/knowledge/utils.py +76 -22
- agno/learn/__init__.py +71 -0
- agno/learn/config.py +463 -0
- agno/learn/curate.py +185 -0
- agno/learn/machine.py +725 -0
- agno/learn/schemas.py +1114 -0
- agno/learn/stores/__init__.py +38 -0
- agno/learn/stores/decision_log.py +1156 -0
- agno/learn/stores/entity_memory.py +3275 -0
- agno/learn/stores/learned_knowledge.py +1583 -0
- agno/learn/stores/protocol.py +117 -0
- agno/learn/stores/session_context.py +1217 -0
- agno/learn/stores/user_memory.py +1495 -0
- agno/learn/stores/user_profile.py +1220 -0
- agno/learn/utils.py +209 -0
- agno/media.py +22 -6
- agno/memory/__init__.py +14 -1
- agno/memory/manager.py +223 -8
- agno/memory/strategies/__init__.py +15 -0
- agno/memory/strategies/base.py +66 -0
- agno/memory/strategies/summarize.py +196 -0
- agno/memory/strategies/types.py +37 -0
- agno/models/aimlapi/aimlapi.py +17 -0
- agno/models/anthropic/claude.py +434 -59
- agno/models/aws/bedrock.py +121 -20
- agno/models/aws/claude.py +131 -274
- agno/models/azure/ai_foundry.py +10 -6
- agno/models/azure/openai_chat.py +33 -10
- agno/models/base.py +1162 -561
- agno/models/cerebras/cerebras.py +120 -24
- agno/models/cerebras/cerebras_openai.py +21 -2
- agno/models/cohere/chat.py +65 -6
- agno/models/cometapi/cometapi.py +18 -1
- agno/models/dashscope/dashscope.py +2 -3
- agno/models/deepinfra/deepinfra.py +18 -1
- agno/models/deepseek/deepseek.py +69 -3
- agno/models/fireworks/fireworks.py +18 -1
- agno/models/google/gemini.py +959 -89
- agno/models/google/utils.py +22 -0
- agno/models/groq/groq.py +48 -18
- agno/models/huggingface/huggingface.py +17 -6
- agno/models/ibm/watsonx.py +16 -6
- agno/models/internlm/internlm.py +18 -1
- agno/models/langdb/langdb.py +13 -1
- agno/models/litellm/chat.py +88 -9
- agno/models/litellm/litellm_openai.py +18 -1
- agno/models/message.py +24 -5
- agno/models/meta/llama.py +40 -13
- agno/models/meta/llama_openai.py +22 -21
- agno/models/metrics.py +12 -0
- agno/models/mistral/mistral.py +8 -4
- agno/models/n1n/__init__.py +3 -0
- agno/models/n1n/n1n.py +57 -0
- agno/models/nebius/nebius.py +6 -7
- agno/models/nvidia/nvidia.py +20 -3
- agno/models/ollama/__init__.py +2 -0
- agno/models/ollama/chat.py +17 -6
- agno/models/ollama/responses.py +100 -0
- agno/models/openai/__init__.py +2 -0
- agno/models/openai/chat.py +117 -26
- agno/models/openai/open_responses.py +46 -0
- agno/models/openai/responses.py +110 -32
- agno/models/openrouter/__init__.py +2 -0
- agno/models/openrouter/openrouter.py +67 -2
- agno/models/openrouter/responses.py +146 -0
- agno/models/perplexity/perplexity.py +19 -1
- agno/models/portkey/portkey.py +7 -6
- agno/models/requesty/requesty.py +19 -2
- agno/models/response.py +20 -2
- agno/models/sambanova/sambanova.py +20 -3
- agno/models/siliconflow/siliconflow.py +19 -2
- agno/models/together/together.py +20 -3
- agno/models/vercel/v0.py +20 -3
- agno/models/vertexai/claude.py +124 -4
- agno/models/vllm/vllm.py +19 -14
- agno/models/xai/xai.py +19 -2
- agno/os/app.py +467 -137
- agno/os/auth.py +253 -5
- agno/os/config.py +22 -0
- agno/os/interfaces/a2a/a2a.py +7 -6
- agno/os/interfaces/a2a/router.py +635 -26
- agno/os/interfaces/a2a/utils.py +32 -33
- agno/os/interfaces/agui/agui.py +5 -3
- agno/os/interfaces/agui/router.py +26 -16
- agno/os/interfaces/agui/utils.py +97 -57
- agno/os/interfaces/base.py +7 -7
- agno/os/interfaces/slack/router.py +16 -7
- agno/os/interfaces/slack/slack.py +7 -7
- agno/os/interfaces/whatsapp/router.py +35 -7
- agno/os/interfaces/whatsapp/security.py +3 -1
- agno/os/interfaces/whatsapp/whatsapp.py +11 -8
- agno/os/managers.py +326 -0
- agno/os/mcp.py +652 -79
- agno/os/middleware/__init__.py +4 -0
- agno/os/middleware/jwt.py +718 -115
- agno/os/middleware/trailing_slash.py +27 -0
- agno/os/router.py +105 -1558
- agno/os/routers/agents/__init__.py +3 -0
- agno/os/routers/agents/router.py +655 -0
- agno/os/routers/agents/schema.py +288 -0
- agno/os/routers/components/__init__.py +3 -0
- agno/os/routers/components/components.py +475 -0
- agno/os/routers/database.py +155 -0
- agno/os/routers/evals/evals.py +111 -18
- agno/os/routers/evals/schemas.py +38 -5
- agno/os/routers/evals/utils.py +80 -11
- agno/os/routers/health.py +3 -3
- agno/os/routers/knowledge/knowledge.py +284 -35
- agno/os/routers/knowledge/schemas.py +14 -2
- agno/os/routers/memory/memory.py +274 -11
- agno/os/routers/memory/schemas.py +44 -3
- agno/os/routers/metrics/metrics.py +30 -15
- agno/os/routers/metrics/schemas.py +10 -6
- agno/os/routers/registry/__init__.py +3 -0
- agno/os/routers/registry/registry.py +337 -0
- agno/os/routers/session/session.py +143 -14
- agno/os/routers/teams/__init__.py +3 -0
- agno/os/routers/teams/router.py +550 -0
- agno/os/routers/teams/schema.py +280 -0
- agno/os/routers/traces/__init__.py +3 -0
- agno/os/routers/traces/schemas.py +414 -0
- agno/os/routers/traces/traces.py +549 -0
- agno/os/routers/workflows/__init__.py +3 -0
- agno/os/routers/workflows/router.py +757 -0
- agno/os/routers/workflows/schema.py +139 -0
- agno/os/schema.py +157 -584
- agno/os/scopes.py +469 -0
- agno/os/settings.py +3 -0
- agno/os/utils.py +574 -185
- agno/reasoning/anthropic.py +85 -1
- agno/reasoning/azure_ai_foundry.py +93 -1
- agno/reasoning/deepseek.py +102 -2
- agno/reasoning/default.py +6 -7
- agno/reasoning/gemini.py +87 -3
- agno/reasoning/groq.py +109 -2
- agno/reasoning/helpers.py +6 -7
- agno/reasoning/manager.py +1238 -0
- agno/reasoning/ollama.py +93 -1
- agno/reasoning/openai.py +115 -1
- agno/reasoning/vertexai.py +85 -1
- agno/registry/__init__.py +3 -0
- agno/registry/registry.py +68 -0
- agno/remote/__init__.py +3 -0
- agno/remote/base.py +581 -0
- agno/run/__init__.py +2 -4
- agno/run/agent.py +134 -19
- agno/run/base.py +49 -1
- agno/run/cancel.py +65 -52
- agno/run/cancellation_management/__init__.py +9 -0
- agno/run/cancellation_management/base.py +78 -0
- agno/run/cancellation_management/in_memory_cancellation_manager.py +100 -0
- agno/run/cancellation_management/redis_cancellation_manager.py +236 -0
- agno/run/requirement.py +181 -0
- agno/run/team.py +111 -19
- agno/run/workflow.py +2 -1
- agno/session/agent.py +57 -92
- agno/session/summary.py +1 -1
- agno/session/team.py +62 -115
- agno/session/workflow.py +353 -57
- agno/skills/__init__.py +17 -0
- agno/skills/agent_skills.py +377 -0
- agno/skills/errors.py +32 -0
- agno/skills/loaders/__init__.py +4 -0
- agno/skills/loaders/base.py +27 -0
- agno/skills/loaders/local.py +216 -0
- agno/skills/skill.py +65 -0
- agno/skills/utils.py +107 -0
- agno/skills/validator.py +277 -0
- agno/table.py +10 -0
- agno/team/__init__.py +5 -1
- agno/team/remote.py +447 -0
- agno/team/team.py +3769 -2202
- agno/tools/brandfetch.py +27 -18
- agno/tools/browserbase.py +225 -16
- agno/tools/crawl4ai.py +3 -0
- agno/tools/duckduckgo.py +25 -71
- agno/tools/exa.py +0 -21
- agno/tools/file.py +14 -13
- agno/tools/file_generation.py +12 -6
- agno/tools/firecrawl.py +15 -7
- agno/tools/function.py +94 -113
- agno/tools/google_bigquery.py +11 -2
- agno/tools/google_drive.py +4 -3
- agno/tools/knowledge.py +9 -4
- agno/tools/mcp/mcp.py +301 -18
- agno/tools/mcp/multi_mcp.py +269 -14
- agno/tools/mem0.py +11 -10
- agno/tools/memory.py +47 -46
- agno/tools/mlx_transcribe.py +10 -7
- agno/tools/models/nebius.py +5 -5
- agno/tools/models_labs.py +20 -10
- agno/tools/nano_banana.py +151 -0
- agno/tools/parallel.py +0 -7
- agno/tools/postgres.py +76 -36
- agno/tools/python.py +14 -6
- agno/tools/reasoning.py +30 -23
- agno/tools/redshift.py +406 -0
- agno/tools/shopify.py +1519 -0
- agno/tools/spotify.py +919 -0
- agno/tools/tavily.py +4 -1
- agno/tools/toolkit.py +253 -18
- agno/tools/websearch.py +93 -0
- agno/tools/website.py +1 -1
- agno/tools/wikipedia.py +1 -1
- agno/tools/workflow.py +56 -48
- agno/tools/yfinance.py +12 -11
- agno/tracing/__init__.py +12 -0
- agno/tracing/exporter.py +161 -0
- agno/tracing/schemas.py +276 -0
- agno/tracing/setup.py +112 -0
- agno/utils/agent.py +251 -10
- agno/utils/cryptography.py +22 -0
- agno/utils/dttm.py +33 -0
- agno/utils/events.py +264 -7
- agno/utils/hooks.py +111 -3
- agno/utils/http.py +161 -2
- agno/utils/mcp.py +49 -8
- agno/utils/media.py +22 -1
- agno/utils/models/ai_foundry.py +9 -2
- agno/utils/models/claude.py +20 -5
- agno/utils/models/cohere.py +9 -2
- agno/utils/models/llama.py +9 -2
- agno/utils/models/mistral.py +4 -2
- agno/utils/os.py +0 -0
- agno/utils/print_response/agent.py +99 -16
- agno/utils/print_response/team.py +223 -24
- agno/utils/print_response/workflow.py +0 -2
- agno/utils/prompts.py +8 -6
- agno/utils/remote.py +23 -0
- agno/utils/response.py +1 -13
- agno/utils/string.py +91 -2
- agno/utils/team.py +62 -12
- agno/utils/tokens.py +657 -0
- agno/vectordb/base.py +15 -2
- agno/vectordb/cassandra/cassandra.py +1 -1
- agno/vectordb/chroma/__init__.py +2 -1
- agno/vectordb/chroma/chromadb.py +468 -23
- agno/vectordb/clickhouse/clickhousedb.py +1 -1
- agno/vectordb/couchbase/couchbase.py +6 -2
- agno/vectordb/lancedb/lance_db.py +7 -38
- agno/vectordb/lightrag/lightrag.py +7 -6
- agno/vectordb/milvus/milvus.py +118 -84
- agno/vectordb/mongodb/__init__.py +2 -1
- agno/vectordb/mongodb/mongodb.py +14 -31
- agno/vectordb/pgvector/pgvector.py +120 -66
- agno/vectordb/pineconedb/pineconedb.py +2 -19
- agno/vectordb/qdrant/__init__.py +2 -1
- agno/vectordb/qdrant/qdrant.py +33 -56
- agno/vectordb/redis/__init__.py +2 -1
- agno/vectordb/redis/redisdb.py +19 -31
- agno/vectordb/singlestore/singlestore.py +17 -9
- agno/vectordb/surrealdb/surrealdb.py +2 -38
- agno/vectordb/weaviate/__init__.py +2 -1
- agno/vectordb/weaviate/weaviate.py +7 -3
- agno/workflow/__init__.py +5 -1
- agno/workflow/agent.py +2 -2
- agno/workflow/condition.py +12 -10
- agno/workflow/loop.py +28 -9
- agno/workflow/parallel.py +21 -13
- agno/workflow/remote.py +362 -0
- agno/workflow/router.py +12 -9
- agno/workflow/step.py +261 -36
- agno/workflow/steps.py +12 -8
- agno/workflow/types.py +40 -77
- agno/workflow/workflow.py +939 -213
- {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/METADATA +134 -181
- agno-2.4.3.dist-info/RECORD +677 -0
- {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/WHEEL +1 -1
- agno/tools/googlesearch.py +0 -98
- agno/tools/memori.py +0 -339
- agno-2.2.13.dist-info/RECORD +0 -575
- {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/licenses/LICENSE +0 -0
- {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/top_level.txt +0 -0
agno/db/sqlite/sqlite.py
CHANGED
|
@@ -1,10 +1,14 @@
|
|
|
1
1
|
import time
|
|
2
2
|
from datetime import date, datetime, timedelta, timezone
|
|
3
3
|
from pathlib import Path
|
|
4
|
-
from typing import Any, Dict, List, Optional, Sequence, Tuple, Union, cast
|
|
4
|
+
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Tuple, Union, cast
|
|
5
5
|
from uuid import uuid4
|
|
6
6
|
|
|
7
|
-
|
|
7
|
+
if TYPE_CHECKING:
|
|
8
|
+
from agno.tracing.schemas import Span, Trace
|
|
9
|
+
|
|
10
|
+
from agno.db.base import BaseDb, ComponentType, SessionType
|
|
11
|
+
from agno.db.migrations.manager import MigrationManager
|
|
8
12
|
from agno.db.schemas.culture import CulturalKnowledge
|
|
9
13
|
from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
|
|
10
14
|
from agno.db.schemas.knowledge import KnowledgeRow
|
|
@@ -27,11 +31,11 @@ from agno.utils.log import log_debug, log_error, log_info, log_warning
|
|
|
27
31
|
from agno.utils.string import generate_id
|
|
28
32
|
|
|
29
33
|
try:
|
|
30
|
-
from sqlalchemy import Column, MetaData,
|
|
34
|
+
from sqlalchemy import Column, MetaData, String, Table, func, select, text
|
|
31
35
|
from sqlalchemy.dialects import sqlite
|
|
32
36
|
from sqlalchemy.engine import Engine, create_engine
|
|
33
37
|
from sqlalchemy.orm import scoped_session, sessionmaker
|
|
34
|
-
from sqlalchemy.schema import Index, UniqueConstraint
|
|
38
|
+
from sqlalchemy.schema import ForeignKey, Index, UniqueConstraint
|
|
35
39
|
except ImportError:
|
|
36
40
|
raise ImportError("`sqlalchemy` not installed. Please install it using `pip install sqlalchemy`")
|
|
37
41
|
|
|
@@ -48,6 +52,13 @@ class SqliteDb(BaseDb):
|
|
|
48
52
|
metrics_table: Optional[str] = None,
|
|
49
53
|
eval_table: Optional[str] = None,
|
|
50
54
|
knowledge_table: Optional[str] = None,
|
|
55
|
+
traces_table: Optional[str] = None,
|
|
56
|
+
spans_table: Optional[str] = None,
|
|
57
|
+
versions_table: Optional[str] = None,
|
|
58
|
+
components_table: Optional[str] = None,
|
|
59
|
+
component_configs_table: Optional[str] = None,
|
|
60
|
+
component_links_table: Optional[str] = None,
|
|
61
|
+
learnings_table: Optional[str] = None,
|
|
51
62
|
id: Optional[str] = None,
|
|
52
63
|
):
|
|
53
64
|
"""
|
|
@@ -69,6 +80,13 @@ class SqliteDb(BaseDb):
|
|
|
69
80
|
metrics_table (Optional[str]): Name of the table to store metrics.
|
|
70
81
|
eval_table (Optional[str]): Name of the table to store evaluation runs data.
|
|
71
82
|
knowledge_table (Optional[str]): Name of the table to store knowledge documents data.
|
|
83
|
+
traces_table (Optional[str]): Name of the table to store run traces.
|
|
84
|
+
spans_table (Optional[str]): Name of the table to store span events.
|
|
85
|
+
versions_table (Optional[str]): Name of the table to store schema versions.
|
|
86
|
+
components_table (Optional[str]): Name of the table to store components.
|
|
87
|
+
component_configs_table (Optional[str]): Name of the table to store component configurations.
|
|
88
|
+
component_links_table (Optional[str]): Name of the table to store component links.
|
|
89
|
+
learnings_table (Optional[str]): Name of the table to store learning records.
|
|
72
90
|
id (Optional[str]): ID of the database.
|
|
73
91
|
|
|
74
92
|
Raises:
|
|
@@ -86,6 +104,13 @@ class SqliteDb(BaseDb):
|
|
|
86
104
|
metrics_table=metrics_table,
|
|
87
105
|
eval_table=eval_table,
|
|
88
106
|
knowledge_table=knowledge_table,
|
|
107
|
+
traces_table=traces_table,
|
|
108
|
+
spans_table=spans_table,
|
|
109
|
+
versions_table=versions_table,
|
|
110
|
+
components_table=components_table,
|
|
111
|
+
component_configs_table=component_configs_table,
|
|
112
|
+
component_links_table=component_links_table,
|
|
113
|
+
learnings_table=learnings_table,
|
|
89
114
|
)
|
|
90
115
|
|
|
91
116
|
_engine: Optional[Engine] = db_engine
|
|
@@ -112,6 +137,47 @@ class SqliteDb(BaseDb):
|
|
|
112
137
|
# Initialize database session
|
|
113
138
|
self.Session: scoped_session = scoped_session(sessionmaker(bind=self.db_engine))
|
|
114
139
|
|
|
140
|
+
# -- Serialization methods --
|
|
141
|
+
def to_dict(self) -> Dict[str, Any]:
|
|
142
|
+
base = super().to_dict()
|
|
143
|
+
base.update(
|
|
144
|
+
{
|
|
145
|
+
"db_file": self.db_file,
|
|
146
|
+
"db_url": self.db_url,
|
|
147
|
+
"type": "sqlite",
|
|
148
|
+
}
|
|
149
|
+
)
|
|
150
|
+
return base
|
|
151
|
+
|
|
152
|
+
@classmethod
|
|
153
|
+
def from_dict(cls, data: Dict[str, Any]) -> "SqliteDb":
|
|
154
|
+
return cls(
|
|
155
|
+
db_file=data.get("db_file"),
|
|
156
|
+
db_url=data.get("db_url"),
|
|
157
|
+
session_table=data.get("session_table"),
|
|
158
|
+
culture_table=data.get("culture_table"),
|
|
159
|
+
memory_table=data.get("memory_table"),
|
|
160
|
+
metrics_table=data.get("metrics_table"),
|
|
161
|
+
eval_table=data.get("eval_table"),
|
|
162
|
+
knowledge_table=data.get("knowledge_table"),
|
|
163
|
+
traces_table=data.get("traces_table"),
|
|
164
|
+
spans_table=data.get("spans_table"),
|
|
165
|
+
versions_table=data.get("versions_table"),
|
|
166
|
+
components_table=data.get("components_table"),
|
|
167
|
+
component_configs_table=data.get("component_configs_table"),
|
|
168
|
+
component_links_table=data.get("component_links_table"),
|
|
169
|
+
id=data.get("id"),
|
|
170
|
+
)
|
|
171
|
+
|
|
172
|
+
def close(self) -> None:
|
|
173
|
+
"""Close database connections and dispose of the connection pool.
|
|
174
|
+
|
|
175
|
+
Should be called during application shutdown to properly release
|
|
176
|
+
all database connections.
|
|
177
|
+
"""
|
|
178
|
+
if self.db_engine is not None:
|
|
179
|
+
self.db_engine.dispose()
|
|
180
|
+
|
|
115
181
|
# -- DB methods --
|
|
116
182
|
def table_exists(self, table_name: str) -> bool:
|
|
117
183
|
"""Check if a table with the given name exists in the SQLite database.
|
|
@@ -133,15 +199,26 @@ class SqliteDb(BaseDb):
|
|
|
133
199
|
(self.metrics_table_name, "metrics"),
|
|
134
200
|
(self.eval_table_name, "evals"),
|
|
135
201
|
(self.knowledge_table_name, "knowledge"),
|
|
202
|
+
(self.versions_table_name, "versions"),
|
|
203
|
+
(self.components_table_name, "components"),
|
|
204
|
+
(self.component_configs_table_name, "component_configs"),
|
|
205
|
+
(self.component_links_table_name, "component_links"),
|
|
206
|
+
(self.learnings_table_name, "learnings"),
|
|
136
207
|
]
|
|
137
208
|
|
|
138
209
|
for table_name, table_type in tables_to_create:
|
|
139
|
-
self.
|
|
210
|
+
self._get_or_create_table(table_name=table_name, table_type=table_type, create_table_if_not_found=True)
|
|
140
211
|
|
|
141
212
|
def _create_table(self, table_name: str, table_type: str) -> Table:
|
|
142
213
|
"""
|
|
143
214
|
Create a table with the appropriate schema based on the table type.
|
|
144
215
|
|
|
216
|
+
Supports:
|
|
217
|
+
- _unique_constraints: [{"name": "...", "columns": [...]}]
|
|
218
|
+
- __primary_key__: ["col1", "col2", ...]
|
|
219
|
+
- __foreign_keys__: [{"columns":[...], "ref_table":"...", "ref_columns":[...]}]
|
|
220
|
+
- column-level foreign_key: "logical_table.column" (resolved via _resolve_* helpers)
|
|
221
|
+
|
|
145
222
|
Args:
|
|
146
223
|
table_name (str): Name of the table to create
|
|
147
224
|
table_type (str): Type of table (used to get schema definition)
|
|
@@ -150,53 +227,117 @@ class SqliteDb(BaseDb):
|
|
|
150
227
|
Table: SQLAlchemy Table object
|
|
151
228
|
"""
|
|
152
229
|
try:
|
|
153
|
-
|
|
154
|
-
|
|
230
|
+
from sqlalchemy.schema import ForeignKeyConstraint, PrimaryKeyConstraint
|
|
231
|
+
|
|
232
|
+
# Pass traces_table_name for spans table foreign key resolution
|
|
233
|
+
table_schema = get_table_schema_definition(table_type, traces_table_name=self.trace_table_name).copy()
|
|
155
234
|
|
|
156
235
|
columns: List[Column] = []
|
|
157
236
|
indexes: List[str] = []
|
|
158
|
-
|
|
237
|
+
|
|
238
|
+
# Extract special schema keys before iterating columns
|
|
159
239
|
schema_unique_constraints = table_schema.pop("_unique_constraints", [])
|
|
240
|
+
schema_primary_key = table_schema.pop("__primary_key__", None)
|
|
241
|
+
schema_foreign_keys = table_schema.pop("__foreign_keys__", [])
|
|
160
242
|
|
|
161
|
-
#
|
|
243
|
+
# Build columns
|
|
162
244
|
for col_name, col_config in table_schema.items():
|
|
163
245
|
column_args = [col_name, col_config["type"]()]
|
|
164
|
-
column_kwargs = {}
|
|
246
|
+
column_kwargs: Dict[str, Any] = {}
|
|
165
247
|
|
|
166
|
-
if
|
|
248
|
+
# Column-level PK only if no composite PK is defined
|
|
249
|
+
if col_config.get("primary_key", False) and schema_primary_key is None:
|
|
167
250
|
column_kwargs["primary_key"] = True
|
|
251
|
+
|
|
168
252
|
if "nullable" in col_config:
|
|
169
253
|
column_kwargs["nullable"] = col_config["nullable"]
|
|
254
|
+
|
|
255
|
+
if "default" in col_config:
|
|
256
|
+
column_kwargs["default"] = col_config["default"]
|
|
257
|
+
|
|
170
258
|
if col_config.get("index", False):
|
|
171
259
|
indexes.append(col_name)
|
|
260
|
+
|
|
172
261
|
if col_config.get("unique", False):
|
|
173
262
|
column_kwargs["unique"] = True
|
|
174
|
-
|
|
263
|
+
|
|
264
|
+
# Single-column FK
|
|
265
|
+
if "foreign_key" in col_config:
|
|
266
|
+
fk_ref = self._resolve_fk_reference(col_config["foreign_key"])
|
|
267
|
+
column_args.append(ForeignKey(fk_ref))
|
|
175
268
|
|
|
176
269
|
columns.append(Column(*column_args, **column_kwargs)) # type: ignore
|
|
177
270
|
|
|
178
271
|
# Create the table object
|
|
179
|
-
|
|
180
|
-
|
|
272
|
+
table = Table(table_name, self.metadata, *columns)
|
|
273
|
+
|
|
274
|
+
# Composite PK
|
|
275
|
+
if schema_primary_key is not None:
|
|
276
|
+
missing = [c for c in schema_primary_key if c not in table.c]
|
|
277
|
+
if missing:
|
|
278
|
+
raise ValueError(f"Composite PK references missing columns in {table_name}: {missing}")
|
|
279
|
+
|
|
280
|
+
pk_constraint_name = f"{table_name}_pkey"
|
|
281
|
+
table.append_constraint(PrimaryKeyConstraint(*schema_primary_key, name=pk_constraint_name))
|
|
282
|
+
|
|
283
|
+
# Composite FKs
|
|
284
|
+
for fk_config in schema_foreign_keys:
|
|
285
|
+
fk_columns = fk_config["columns"]
|
|
286
|
+
ref_table_logical = fk_config["ref_table"]
|
|
287
|
+
ref_columns = fk_config["ref_columns"]
|
|
288
|
+
|
|
289
|
+
if len(fk_columns) != len(ref_columns):
|
|
290
|
+
raise ValueError(
|
|
291
|
+
f"Composite FK in {table_name} has mismatched columns/ref_columns: {fk_columns} vs {ref_columns}"
|
|
292
|
+
)
|
|
181
293
|
|
|
182
|
-
|
|
294
|
+
missing = [c for c in fk_columns if c not in table.c]
|
|
295
|
+
if missing:
|
|
296
|
+
raise ValueError(f"Composite FK references missing columns in {table_name}: {missing}")
|
|
297
|
+
|
|
298
|
+
resolved_ref_table = self._resolve_table_name(ref_table_logical)
|
|
299
|
+
fk_constraint_name = f"{table_name}_{'_'.join(fk_columns)}_fkey"
|
|
300
|
+
|
|
301
|
+
ref_column_strings = [f"{resolved_ref_table}.{col}" for col in ref_columns]
|
|
302
|
+
|
|
303
|
+
table.append_constraint(
|
|
304
|
+
ForeignKeyConstraint(
|
|
305
|
+
fk_columns,
|
|
306
|
+
ref_column_strings,
|
|
307
|
+
name=fk_constraint_name,
|
|
308
|
+
)
|
|
309
|
+
)
|
|
310
|
+
|
|
311
|
+
# Multi-column unique constraints
|
|
183
312
|
for constraint in schema_unique_constraints:
|
|
184
313
|
constraint_name = f"{table_name}_{constraint['name']}"
|
|
185
314
|
constraint_columns = constraint["columns"]
|
|
315
|
+
|
|
316
|
+
missing = [c for c in constraint_columns if c not in table.c]
|
|
317
|
+
if missing:
|
|
318
|
+
raise ValueError(f"Unique constraint references missing columns in {table_name}: {missing}")
|
|
319
|
+
|
|
186
320
|
table.append_constraint(UniqueConstraint(*constraint_columns, name=constraint_name))
|
|
187
321
|
|
|
188
|
-
#
|
|
322
|
+
# Indexes
|
|
189
323
|
for idx_col in indexes:
|
|
324
|
+
if idx_col not in table.c:
|
|
325
|
+
raise ValueError(f"Index references missing column in {table_name}: {idx_col}")
|
|
190
326
|
idx_name = f"idx_{table_name}_{idx_col}"
|
|
191
|
-
|
|
327
|
+
Index(idx_name, table.c[idx_col]) # Correct way; do NOT append as constraint
|
|
192
328
|
|
|
193
329
|
# Create table
|
|
194
|
-
|
|
330
|
+
table_created = False
|
|
331
|
+
if not self.table_exists(table_name):
|
|
332
|
+
table.create(self.db_engine, checkfirst=True)
|
|
333
|
+
log_debug(f"Successfully created table '{table_name}'")
|
|
334
|
+
table_created = True
|
|
335
|
+
else:
|
|
336
|
+
log_debug(f"Table '{table_name}' already exists, skipping creation")
|
|
195
337
|
|
|
196
|
-
# Create indexes
|
|
338
|
+
# Create indexes (SQLite)
|
|
197
339
|
for idx in table.indexes:
|
|
198
340
|
try:
|
|
199
|
-
log_debug(f"Creating index: {idx.name}")
|
|
200
341
|
# Check if index already exists
|
|
201
342
|
with self.Session() as sess:
|
|
202
343
|
exists_query = text("SELECT 1 FROM sqlite_master WHERE type = 'index' AND name = :index_name")
|
|
@@ -206,17 +347,60 @@ class SqliteDb(BaseDb):
|
|
|
206
347
|
continue
|
|
207
348
|
|
|
208
349
|
idx.create(self.db_engine)
|
|
350
|
+
log_debug(f"Created index: {idx.name} for table {table_name}")
|
|
209
351
|
|
|
210
352
|
except Exception as e:
|
|
211
353
|
log_warning(f"Error creating index {idx.name}: {e}")
|
|
212
354
|
|
|
213
|
-
|
|
355
|
+
# Store the schema version for the created table
|
|
356
|
+
if table_name != self.versions_table_name and table_created:
|
|
357
|
+
latest_schema_version = MigrationManager(self).latest_schema_version
|
|
358
|
+
self.upsert_schema_version(table_name=table_name, version=latest_schema_version.public)
|
|
359
|
+
|
|
214
360
|
return table
|
|
215
361
|
|
|
216
362
|
except Exception as e:
|
|
363
|
+
from traceback import print_exc
|
|
364
|
+
|
|
365
|
+
print_exc()
|
|
217
366
|
log_error(f"Could not create table '{table_name}': {e}")
|
|
218
367
|
raise e
|
|
219
368
|
|
|
369
|
+
def _resolve_fk_reference(self, fk_ref: str) -> str:
|
|
370
|
+
"""
|
|
371
|
+
Resolve a simple foreign key reference to the actual table name.
|
|
372
|
+
|
|
373
|
+
Accepts:
|
|
374
|
+
- "logical_table.column" -> "{resolved_table}.{column}"
|
|
375
|
+
- already-qualified refs -> returned as-is
|
|
376
|
+
"""
|
|
377
|
+
parts = fk_ref.split(".")
|
|
378
|
+
if len(parts) == 2:
|
|
379
|
+
table, column = parts
|
|
380
|
+
resolved_table = self._resolve_table_name(table)
|
|
381
|
+
return f"{resolved_table}.{column}"
|
|
382
|
+
return fk_ref
|
|
383
|
+
|
|
384
|
+
def _resolve_table_name(self, logical_name: str) -> str:
|
|
385
|
+
"""
|
|
386
|
+
Resolve logical table name to configured table name.
|
|
387
|
+
"""
|
|
388
|
+
table_map = {
|
|
389
|
+
"components": self.components_table_name,
|
|
390
|
+
"component_configs": self.component_configs_table_name,
|
|
391
|
+
"component_links": self.component_links_table_name,
|
|
392
|
+
"traces": self.trace_table_name,
|
|
393
|
+
"spans": self.span_table_name,
|
|
394
|
+
"sessions": self.session_table_name,
|
|
395
|
+
"memories": self.memory_table_name,
|
|
396
|
+
"metrics": self.metrics_table_name,
|
|
397
|
+
"evals": self.eval_table_name,
|
|
398
|
+
"knowledge": self.knowledge_table_name,
|
|
399
|
+
"culture": self.culture_table_name,
|
|
400
|
+
"versions": self.versions_table_name,
|
|
401
|
+
}
|
|
402
|
+
return table_map.get(logical_name, logical_name)
|
|
403
|
+
|
|
220
404
|
def _get_table(self, table_type: str, create_table_if_not_found: Optional[bool] = False) -> Optional[Table]:
|
|
221
405
|
if table_type == "sessions":
|
|
222
406
|
self.session_table = self._get_or_create_table(
|
|
@@ -259,6 +443,26 @@ class SqliteDb(BaseDb):
|
|
|
259
443
|
)
|
|
260
444
|
return self.knowledge_table
|
|
261
445
|
|
|
446
|
+
elif table_type == "traces":
|
|
447
|
+
self.traces_table = self._get_or_create_table(
|
|
448
|
+
table_name=self.trace_table_name,
|
|
449
|
+
table_type="traces",
|
|
450
|
+
create_table_if_not_found=create_table_if_not_found,
|
|
451
|
+
)
|
|
452
|
+
return self.traces_table
|
|
453
|
+
|
|
454
|
+
elif table_type == "spans":
|
|
455
|
+
# Ensure traces table exists first (spans has FK to traces)
|
|
456
|
+
if create_table_if_not_found:
|
|
457
|
+
self._get_table(table_type="traces", create_table_if_not_found=True)
|
|
458
|
+
|
|
459
|
+
self.spans_table = self._get_or_create_table(
|
|
460
|
+
table_name=self.span_table_name,
|
|
461
|
+
table_type="spans",
|
|
462
|
+
create_table_if_not_found=create_table_if_not_found,
|
|
463
|
+
)
|
|
464
|
+
return self.spans_table
|
|
465
|
+
|
|
262
466
|
elif table_type == "culture":
|
|
263
467
|
self.culture_table = self._get_or_create_table(
|
|
264
468
|
table_name=self.culture_table_name,
|
|
@@ -267,6 +471,54 @@ class SqliteDb(BaseDb):
|
|
|
267
471
|
)
|
|
268
472
|
return self.culture_table
|
|
269
473
|
|
|
474
|
+
elif table_type == "versions":
|
|
475
|
+
self.versions_table = self._get_or_create_table(
|
|
476
|
+
table_name=self.versions_table_name,
|
|
477
|
+
table_type="versions",
|
|
478
|
+
create_table_if_not_found=create_table_if_not_found,
|
|
479
|
+
)
|
|
480
|
+
return self.versions_table
|
|
481
|
+
|
|
482
|
+
elif table_type == "components":
|
|
483
|
+
self.components_table = self._get_or_create_table(
|
|
484
|
+
table_name=self.components_table_name,
|
|
485
|
+
table_type="components",
|
|
486
|
+
create_table_if_not_found=create_table_if_not_found,
|
|
487
|
+
)
|
|
488
|
+
return self.components_table
|
|
489
|
+
|
|
490
|
+
elif table_type == "component_configs":
|
|
491
|
+
# Ensure components table exists first (configs references components)
|
|
492
|
+
if create_table_if_not_found:
|
|
493
|
+
self._get_table(table_type="components", create_table_if_not_found=True)
|
|
494
|
+
|
|
495
|
+
self.component_configs_table = self._get_or_create_table(
|
|
496
|
+
table_name=self.component_configs_table_name,
|
|
497
|
+
table_type="component_configs",
|
|
498
|
+
create_table_if_not_found=create_table_if_not_found,
|
|
499
|
+
)
|
|
500
|
+
return self.component_configs_table
|
|
501
|
+
|
|
502
|
+
elif table_type == "component_links":
|
|
503
|
+
# Ensure components and component_configs tables exist first
|
|
504
|
+
if create_table_if_not_found:
|
|
505
|
+
self._get_table(table_type="components", create_table_if_not_found=True)
|
|
506
|
+
self._get_table(table_type="component_configs", create_table_if_not_found=True)
|
|
507
|
+
|
|
508
|
+
self.component_links_table = self._get_or_create_table(
|
|
509
|
+
table_name=self.component_links_table_name,
|
|
510
|
+
table_type="component_links",
|
|
511
|
+
create_table_if_not_found=create_table_if_not_found,
|
|
512
|
+
)
|
|
513
|
+
return self.component_links_table
|
|
514
|
+
elif table_type == "learnings":
|
|
515
|
+
self.learnings_table = self._get_or_create_table(
|
|
516
|
+
table_name=self.learnings_table_name,
|
|
517
|
+
table_type="learnings",
|
|
518
|
+
create_table_if_not_found=create_table_if_not_found,
|
|
519
|
+
)
|
|
520
|
+
return self.learnings_table
|
|
521
|
+
|
|
270
522
|
else:
|
|
271
523
|
raise ValueError(f"Unknown table type: '{table_type}'")
|
|
272
524
|
|
|
@@ -300,13 +552,48 @@ class SqliteDb(BaseDb):
|
|
|
300
552
|
|
|
301
553
|
try:
|
|
302
554
|
table = Table(table_name, self.metadata, autoload_with=self.db_engine)
|
|
303
|
-
log_debug(f"Loaded existing table {table_name}")
|
|
304
555
|
return table
|
|
305
556
|
|
|
306
557
|
except Exception as e:
|
|
307
558
|
log_error(f"Error loading existing table {table_name}: {e}")
|
|
308
559
|
raise e
|
|
309
560
|
|
|
561
|
+
def get_latest_schema_version(self, table_name: str):
|
|
562
|
+
"""Get the latest version of the database schema."""
|
|
563
|
+
table = self._get_table(table_type="versions", create_table_if_not_found=True)
|
|
564
|
+
if table is None:
|
|
565
|
+
return "2.0.0"
|
|
566
|
+
with self.Session() as sess:
|
|
567
|
+
stmt = select(table)
|
|
568
|
+
# Latest version for the given table
|
|
569
|
+
stmt = stmt.where(table.c.table_name == table_name)
|
|
570
|
+
stmt = stmt.order_by(table.c.version.desc()).limit(1)
|
|
571
|
+
result = sess.execute(stmt).fetchone()
|
|
572
|
+
if result is None:
|
|
573
|
+
return "2.0.0"
|
|
574
|
+
version_dict = dict(result._mapping)
|
|
575
|
+
return version_dict.get("version") or "2.0.0"
|
|
576
|
+
|
|
577
|
+
def upsert_schema_version(self, table_name: str, version: str) -> None:
|
|
578
|
+
"""Upsert the schema version into the database."""
|
|
579
|
+
table = self._get_table(table_type="versions", create_table_if_not_found=True)
|
|
580
|
+
if table is None:
|
|
581
|
+
return
|
|
582
|
+
current_datetime = datetime.now().isoformat()
|
|
583
|
+
with self.Session() as sess, sess.begin():
|
|
584
|
+
stmt = sqlite.insert(table).values(
|
|
585
|
+
table_name=table_name,
|
|
586
|
+
version=version,
|
|
587
|
+
created_at=current_datetime, # Store as ISO format string
|
|
588
|
+
updated_at=current_datetime,
|
|
589
|
+
)
|
|
590
|
+
# Update version if table_name already exists
|
|
591
|
+
stmt = stmt.on_conflict_do_update(
|
|
592
|
+
index_elements=["table_name"],
|
|
593
|
+
set_=dict(version=version, updated_at=current_datetime),
|
|
594
|
+
)
|
|
595
|
+
sess.execute(stmt)
|
|
596
|
+
|
|
310
597
|
# -- Session methods --
|
|
311
598
|
|
|
312
599
|
def delete_session(self, session_id: str) -> bool:
|
|
@@ -1011,10 +1298,10 @@ class SqliteDb(BaseDb):
|
|
|
1011
1298
|
|
|
1012
1299
|
with self.Session() as sess, sess.begin():
|
|
1013
1300
|
# Select topics from all results
|
|
1014
|
-
stmt = select(
|
|
1301
|
+
stmt = select(table.c.topics)
|
|
1015
1302
|
result = sess.execute(stmt).fetchall()
|
|
1016
|
-
|
|
1017
|
-
return list(set(
|
|
1303
|
+
result = result[0][0]
|
|
1304
|
+
return list(set(result))
|
|
1018
1305
|
|
|
1019
1306
|
except Exception as e:
|
|
1020
1307
|
log_debug(f"Exception reading from memory table: {e}")
|
|
@@ -1116,8 +1403,8 @@ class SqliteDb(BaseDb):
|
|
|
1116
1403
|
if team_id is not None:
|
|
1117
1404
|
stmt = stmt.where(table.c.team_id == team_id)
|
|
1118
1405
|
if topics is not None:
|
|
1119
|
-
|
|
1120
|
-
|
|
1406
|
+
for topic in topics:
|
|
1407
|
+
stmt = stmt.where(func.cast(table.c.topics, String).like(f'%"{topic}"%'))
|
|
1121
1408
|
if search_content is not None:
|
|
1122
1409
|
stmt = stmt.where(table.c.memory.ilike(f"%{search_content}%"))
|
|
1123
1410
|
|
|
@@ -1152,12 +1439,14 @@ class SqliteDb(BaseDb):
|
|
|
1152
1439
|
self,
|
|
1153
1440
|
limit: Optional[int] = None,
|
|
1154
1441
|
page: Optional[int] = None,
|
|
1442
|
+
user_id: Optional[str] = None,
|
|
1155
1443
|
) -> Tuple[List[Dict[str, Any]], int]:
|
|
1156
1444
|
"""Get user memories stats.
|
|
1157
1445
|
|
|
1158
1446
|
Args:
|
|
1159
1447
|
limit (Optional[int]): The maximum number of user stats to return.
|
|
1160
1448
|
page (Optional[int]): The page number.
|
|
1449
|
+
user_id (Optional[str]): User ID for filtering.
|
|
1161
1450
|
|
|
1162
1451
|
Returns:
|
|
1163
1452
|
Tuple[List[Dict[str, Any]], int]: A list of dictionaries containing user stats and total count.
|
|
@@ -1180,19 +1469,20 @@ class SqliteDb(BaseDb):
|
|
|
1180
1469
|
return [], 0
|
|
1181
1470
|
|
|
1182
1471
|
with self.Session() as sess, sess.begin():
|
|
1183
|
-
stmt = (
|
|
1184
|
-
|
|
1185
|
-
|
|
1186
|
-
|
|
1187
|
-
func.max(table.c.updated_at).label("last_memory_updated_at"),
|
|
1188
|
-
)
|
|
1189
|
-
.where(table.c.user_id.is_not(None))
|
|
1190
|
-
.group_by(table.c.user_id)
|
|
1191
|
-
.order_by(func.max(table.c.updated_at).desc())
|
|
1472
|
+
stmt = select(
|
|
1473
|
+
table.c.user_id,
|
|
1474
|
+
func.count(table.c.memory_id).label("total_memories"),
|
|
1475
|
+
func.max(table.c.updated_at).label("last_memory_updated_at"),
|
|
1192
1476
|
)
|
|
1477
|
+
if user_id is not None:
|
|
1478
|
+
stmt = stmt.where(table.c.user_id == user_id)
|
|
1479
|
+
else:
|
|
1480
|
+
stmt = stmt.where(table.c.user_id.is_not(None))
|
|
1481
|
+
stmt = stmt.group_by(table.c.user_id)
|
|
1482
|
+
stmt = stmt.order_by(func.max(table.c.updated_at).desc())
|
|
1193
1483
|
|
|
1194
1484
|
count_stmt = select(func.count()).select_from(stmt.alias())
|
|
1195
|
-
total_count = sess.execute(count_stmt).scalar()
|
|
1485
|
+
total_count = sess.execute(count_stmt).scalar() or 0
|
|
1196
1486
|
|
|
1197
1487
|
# Pagination
|
|
1198
1488
|
if limit is not None:
|
|
@@ -1242,6 +1532,8 @@ class SqliteDb(BaseDb):
|
|
|
1242
1532
|
if memory.memory_id is None:
|
|
1243
1533
|
memory.memory_id = str(uuid4())
|
|
1244
1534
|
|
|
1535
|
+
current_time = int(time.time())
|
|
1536
|
+
|
|
1245
1537
|
with self.Session() as sess, sess.begin():
|
|
1246
1538
|
stmt = sqlite.insert(table).values(
|
|
1247
1539
|
user_id=memory.user_id,
|
|
@@ -1251,7 +1543,9 @@ class SqliteDb(BaseDb):
|
|
|
1251
1543
|
memory=memory.memory,
|
|
1252
1544
|
topics=memory.topics,
|
|
1253
1545
|
input=memory.input,
|
|
1254
|
-
|
|
1546
|
+
feedback=memory.feedback,
|
|
1547
|
+
created_at=memory.created_at,
|
|
1548
|
+
updated_at=memory.created_at,
|
|
1255
1549
|
)
|
|
1256
1550
|
stmt = stmt.on_conflict_do_update( # type: ignore
|
|
1257
1551
|
index_elements=["memory_id"],
|
|
@@ -1259,7 +1553,12 @@ class SqliteDb(BaseDb):
|
|
|
1259
1553
|
memory=memory.memory,
|
|
1260
1554
|
topics=memory.topics,
|
|
1261
1555
|
input=memory.input,
|
|
1262
|
-
|
|
1556
|
+
agent_id=memory.agent_id,
|
|
1557
|
+
team_id=memory.team_id,
|
|
1558
|
+
feedback=memory.feedback,
|
|
1559
|
+
updated_at=current_time,
|
|
1560
|
+
# Preserve created_at on update - don't overwrite existing value
|
|
1561
|
+
created_at=table.c.created_at,
|
|
1263
1562
|
),
|
|
1264
1563
|
).returning(table)
|
|
1265
1564
|
|
|
@@ -1315,12 +1614,14 @@ class SqliteDb(BaseDb):
|
|
|
1315
1614
|
# Prepare bulk data
|
|
1316
1615
|
bulk_data = []
|
|
1317
1616
|
current_time = int(time.time())
|
|
1617
|
+
|
|
1318
1618
|
for memory in memories:
|
|
1319
1619
|
if memory.memory_id is None:
|
|
1320
1620
|
memory.memory_id = str(uuid4())
|
|
1321
1621
|
|
|
1322
1622
|
# Use preserved updated_at if flag is set and value exists, otherwise use current time
|
|
1323
1623
|
updated_at = memory.updated_at if preserve_updated_at else current_time
|
|
1624
|
+
|
|
1324
1625
|
bulk_data.append(
|
|
1325
1626
|
{
|
|
1326
1627
|
"user_id": memory.user_id,
|
|
@@ -1329,6 +1630,9 @@ class SqliteDb(BaseDb):
|
|
|
1329
1630
|
"memory_id": memory.memory_id,
|
|
1330
1631
|
"memory": memory.memory,
|
|
1331
1632
|
"topics": memory.topics,
|
|
1633
|
+
"input": memory.input,
|
|
1634
|
+
"feedback": memory.feedback,
|
|
1635
|
+
"created_at": memory.created_at,
|
|
1332
1636
|
"updated_at": updated_at,
|
|
1333
1637
|
}
|
|
1334
1638
|
)
|
|
@@ -1346,7 +1650,10 @@ class SqliteDb(BaseDb):
|
|
|
1346
1650
|
input=stmt.excluded.input,
|
|
1347
1651
|
agent_id=stmt.excluded.agent_id,
|
|
1348
1652
|
team_id=stmt.excluded.team_id,
|
|
1653
|
+
feedback=stmt.excluded.feedback,
|
|
1349
1654
|
updated_at=stmt.excluded.updated_at,
|
|
1655
|
+
# Preserve created_at on update
|
|
1656
|
+
created_at=table.c.created_at,
|
|
1350
1657
|
),
|
|
1351
1658
|
)
|
|
1352
1659
|
sess.execute(stmt, bulk_data)
|
|
@@ -1998,187 +2305,691 @@ class SqliteDb(BaseDb):
|
|
|
1998
2305
|
log_error(f"Error renaming eval run {eval_run_id}: {e}")
|
|
1999
2306
|
raise e
|
|
2000
2307
|
|
|
2001
|
-
# --
|
|
2002
|
-
|
|
2003
|
-
def migrate_table_from_v1_to_v2(self, v1_db_schema: str, v1_table_name: str, v1_table_type: str):
|
|
2004
|
-
"""Migrate all content in the given table to the right v2 table"""
|
|
2308
|
+
# -- Trace methods --
|
|
2005
2309
|
|
|
2006
|
-
|
|
2007
|
-
|
|
2008
|
-
parse_agent_sessions,
|
|
2009
|
-
parse_memories,
|
|
2010
|
-
parse_team_sessions,
|
|
2011
|
-
parse_workflow_sessions,
|
|
2012
|
-
)
|
|
2310
|
+
def _get_traces_base_query(self, table: Table, spans_table: Optional[Table] = None):
|
|
2311
|
+
"""Build base query for traces with aggregated span counts.
|
|
2013
2312
|
|
|
2014
|
-
|
|
2015
|
-
|
|
2016
|
-
|
|
2017
|
-
db_schema=v1_db_schema,
|
|
2018
|
-
table_name=v1_table_name,
|
|
2019
|
-
)
|
|
2020
|
-
if not old_content:
|
|
2021
|
-
log_info(f"No content to migrate from table {v1_table_name}")
|
|
2022
|
-
return
|
|
2313
|
+
Args:
|
|
2314
|
+
table: The traces table.
|
|
2315
|
+
spans_table: The spans table (optional).
|
|
2023
2316
|
|
|
2024
|
-
|
|
2025
|
-
|
|
2026
|
-
|
|
2027
|
-
|
|
2028
|
-
|
|
2029
|
-
|
|
2030
|
-
|
|
2031
|
-
|
|
2032
|
-
|
|
2033
|
-
|
|
2034
|
-
|
|
2317
|
+
Returns:
|
|
2318
|
+
SQLAlchemy select statement with total_spans and error_count calculated dynamically.
|
|
2319
|
+
"""
|
|
2320
|
+
from sqlalchemy import case, func, literal
|
|
2321
|
+
|
|
2322
|
+
if spans_table is not None:
|
|
2323
|
+
# JOIN with spans table to calculate total_spans and error_count
|
|
2324
|
+
return (
|
|
2325
|
+
select(
|
|
2326
|
+
table,
|
|
2327
|
+
func.coalesce(func.count(spans_table.c.span_id), 0).label("total_spans"),
|
|
2328
|
+
func.coalesce(func.sum(case((spans_table.c.status_code == "ERROR", 1), else_=0)), 0).label(
|
|
2329
|
+
"error_count"
|
|
2330
|
+
),
|
|
2331
|
+
)
|
|
2332
|
+
.select_from(table.outerjoin(spans_table, table.c.trace_id == spans_table.c.trace_id))
|
|
2333
|
+
.group_by(table.c.trace_id)
|
|
2334
|
+
)
|
|
2035
2335
|
else:
|
|
2036
|
-
|
|
2336
|
+
# Fallback if spans table doesn't exist
|
|
2337
|
+
return select(table, literal(0).label("total_spans"), literal(0).label("error_count"))
|
|
2037
2338
|
|
|
2038
|
-
|
|
2039
|
-
|
|
2040
|
-
for session in sessions:
|
|
2041
|
-
self.upsert_session(session)
|
|
2042
|
-
log_info(f"Migrated {len(sessions)} Agent sessions to table: {self.session_table_name}")
|
|
2339
|
+
def _get_trace_component_level_expr(self, workflow_id_col, team_id_col, agent_id_col, name_col):
|
|
2340
|
+
"""Build a SQL CASE expression that returns the component level for a trace.
|
|
2043
2341
|
|
|
2044
|
-
|
|
2045
|
-
|
|
2046
|
-
|
|
2047
|
-
|
|
2342
|
+
Component levels (higher = more important):
|
|
2343
|
+
- 3: Workflow root (.run or .arun with workflow_id)
|
|
2344
|
+
- 2: Team root (.run or .arun with team_id)
|
|
2345
|
+
- 1: Agent root (.run or .arun with agent_id)
|
|
2346
|
+
- 0: Child span (not a root)
|
|
2048
2347
|
|
|
2049
|
-
|
|
2050
|
-
|
|
2051
|
-
|
|
2052
|
-
|
|
2348
|
+
Args:
|
|
2349
|
+
workflow_id_col: SQL column/expression for workflow_id
|
|
2350
|
+
team_id_col: SQL column/expression for team_id
|
|
2351
|
+
agent_id_col: SQL column/expression for agent_id
|
|
2352
|
+
name_col: SQL column/expression for name
|
|
2053
2353
|
|
|
2054
|
-
|
|
2055
|
-
|
|
2056
|
-
|
|
2057
|
-
|
|
2354
|
+
Returns:
|
|
2355
|
+
SQLAlchemy CASE expression returning the component level as an integer.
|
|
2356
|
+
"""
|
|
2357
|
+
from sqlalchemy import and_, case, or_
|
|
2358
|
+
|
|
2359
|
+
is_root_name = or_(name_col.contains(".run"), name_col.contains(".arun"))
|
|
2360
|
+
|
|
2361
|
+
return case(
|
|
2362
|
+
# Workflow root (level 3)
|
|
2363
|
+
(and_(workflow_id_col.isnot(None), is_root_name), 3),
|
|
2364
|
+
# Team root (level 2)
|
|
2365
|
+
(and_(team_id_col.isnot(None), is_root_name), 2),
|
|
2366
|
+
# Agent root (level 1)
|
|
2367
|
+
(and_(agent_id_col.isnot(None), is_root_name), 1),
|
|
2368
|
+
# Child span or unknown (level 0)
|
|
2369
|
+
else_=0,
|
|
2370
|
+
)
|
|
2058
2371
|
|
|
2059
|
-
|
|
2372
|
+
def upsert_trace(self, trace: "Trace") -> None:
|
|
2373
|
+
"""Create or update a single trace record in the database.
|
|
2060
2374
|
|
|
2061
|
-
|
|
2062
|
-
|
|
2375
|
+
Uses INSERT ... ON CONFLICT DO UPDATE (upsert) to handle concurrent inserts
|
|
2376
|
+
atomically and avoid race conditions.
|
|
2063
2377
|
|
|
2064
|
-
|
|
2065
|
-
|
|
2378
|
+
Args:
|
|
2379
|
+
trace: The Trace object to store (one per trace_id).
|
|
2066
2380
|
"""
|
|
2381
|
+
from sqlalchemy import case
|
|
2382
|
+
|
|
2067
2383
|
try:
|
|
2068
|
-
table = self._get_table(table_type="
|
|
2384
|
+
table = self._get_table(table_type="traces", create_table_if_not_found=True)
|
|
2069
2385
|
if table is None:
|
|
2070
2386
|
return
|
|
2071
2387
|
|
|
2388
|
+
trace_dict = trace.to_dict()
|
|
2389
|
+
trace_dict.pop("total_spans", None)
|
|
2390
|
+
trace_dict.pop("error_count", None)
|
|
2391
|
+
|
|
2072
2392
|
with self.Session() as sess, sess.begin():
|
|
2073
|
-
|
|
2393
|
+
# Use upsert to handle concurrent inserts atomically
|
|
2394
|
+
# On conflict, update fields while preserving existing non-null context values
|
|
2395
|
+
# and keeping the earliest start_time
|
|
2396
|
+
insert_stmt = sqlite.insert(table).values(trace_dict)
|
|
2397
|
+
|
|
2398
|
+
# Build component level expressions for comparing trace priority
|
|
2399
|
+
new_level = self._get_trace_component_level_expr(
|
|
2400
|
+
insert_stmt.excluded.workflow_id,
|
|
2401
|
+
insert_stmt.excluded.team_id,
|
|
2402
|
+
insert_stmt.excluded.agent_id,
|
|
2403
|
+
insert_stmt.excluded.name,
|
|
2404
|
+
)
|
|
2405
|
+
existing_level = self._get_trace_component_level_expr(
|
|
2406
|
+
table.c.workflow_id,
|
|
2407
|
+
table.c.team_id,
|
|
2408
|
+
table.c.agent_id,
|
|
2409
|
+
table.c.name,
|
|
2410
|
+
)
|
|
2074
2411
|
|
|
2075
|
-
|
|
2076
|
-
|
|
2412
|
+
# Build the ON CONFLICT DO UPDATE clause
|
|
2413
|
+
# Use MIN for start_time, MAX for end_time to capture full trace duration
|
|
2414
|
+
# SQLite stores timestamps as ISO strings, so string comparison works for ISO format
|
|
2415
|
+
# Duration is calculated as: (MAX(end_time) - MIN(start_time)) in milliseconds
|
|
2416
|
+
# SQLite doesn't have epoch extraction, so we calculate duration using julianday
|
|
2417
|
+
upsert_stmt = insert_stmt.on_conflict_do_update(
|
|
2418
|
+
index_elements=["trace_id"],
|
|
2419
|
+
set_={
|
|
2420
|
+
"end_time": func.max(table.c.end_time, insert_stmt.excluded.end_time),
|
|
2421
|
+
"start_time": func.min(table.c.start_time, insert_stmt.excluded.start_time),
|
|
2422
|
+
# Calculate duration in milliseconds using julianday (SQLite-specific)
|
|
2423
|
+
# julianday returns days, so multiply by 86400000 to get milliseconds
|
|
2424
|
+
"duration_ms": (
|
|
2425
|
+
func.julianday(func.max(table.c.end_time, insert_stmt.excluded.end_time))
|
|
2426
|
+
- func.julianday(func.min(table.c.start_time, insert_stmt.excluded.start_time))
|
|
2427
|
+
)
|
|
2428
|
+
* 86400000,
|
|
2429
|
+
"status": insert_stmt.excluded.status,
|
|
2430
|
+
# Update name only if new trace is from a higher-level component
|
|
2431
|
+
# Priority: workflow (3) > team (2) > agent (1) > child spans (0)
|
|
2432
|
+
"name": case(
|
|
2433
|
+
(new_level > existing_level, insert_stmt.excluded.name),
|
|
2434
|
+
else_=table.c.name,
|
|
2435
|
+
),
|
|
2436
|
+
# Preserve existing non-null context values using COALESCE
|
|
2437
|
+
"run_id": func.coalesce(insert_stmt.excluded.run_id, table.c.run_id),
|
|
2438
|
+
"session_id": func.coalesce(insert_stmt.excluded.session_id, table.c.session_id),
|
|
2439
|
+
"user_id": func.coalesce(insert_stmt.excluded.user_id, table.c.user_id),
|
|
2440
|
+
"agent_id": func.coalesce(insert_stmt.excluded.agent_id, table.c.agent_id),
|
|
2441
|
+
"team_id": func.coalesce(insert_stmt.excluded.team_id, table.c.team_id),
|
|
2442
|
+
"workflow_id": func.coalesce(insert_stmt.excluded.workflow_id, table.c.workflow_id),
|
|
2443
|
+
},
|
|
2444
|
+
)
|
|
2445
|
+
sess.execute(upsert_stmt)
|
|
2077
2446
|
|
|
2078
|
-
|
|
2079
|
-
|
|
2447
|
+
except Exception as e:
|
|
2448
|
+
log_error(f"Error creating trace: {e}")
|
|
2449
|
+
# Don't raise - tracing should not break the main application flow
|
|
2080
2450
|
|
|
2081
|
-
def
|
|
2082
|
-
|
|
2451
|
+
def get_trace(
|
|
2452
|
+
self,
|
|
2453
|
+
trace_id: Optional[str] = None,
|
|
2454
|
+
run_id: Optional[str] = None,
|
|
2455
|
+
):
|
|
2456
|
+
"""Get a single trace by trace_id or other filters.
|
|
2083
2457
|
|
|
2084
2458
|
Args:
|
|
2085
|
-
|
|
2459
|
+
trace_id: The unique trace identifier.
|
|
2460
|
+
run_id: Filter by run ID (returns first match).
|
|
2086
2461
|
|
|
2087
|
-
|
|
2088
|
-
|
|
2462
|
+
Returns:
|
|
2463
|
+
Optional[Trace]: The trace if found, None otherwise.
|
|
2464
|
+
|
|
2465
|
+
Note:
|
|
2466
|
+
If multiple filters are provided, trace_id takes precedence.
|
|
2467
|
+
For other filters, the most recent trace is returned.
|
|
2089
2468
|
"""
|
|
2090
2469
|
try:
|
|
2091
|
-
|
|
2470
|
+
from agno.tracing.schemas import Trace
|
|
2471
|
+
|
|
2472
|
+
table = self._get_table(table_type="traces")
|
|
2092
2473
|
if table is None:
|
|
2093
|
-
return
|
|
2474
|
+
return None
|
|
2094
2475
|
|
|
2095
|
-
|
|
2096
|
-
|
|
2097
|
-
result = sess.execute(delete_stmt)
|
|
2476
|
+
# Get spans table for JOIN
|
|
2477
|
+
spans_table = self._get_table(table_type="spans")
|
|
2098
2478
|
|
|
2099
|
-
|
|
2100
|
-
|
|
2101
|
-
|
|
2479
|
+
with self.Session() as sess:
|
|
2480
|
+
# Build query with aggregated span counts
|
|
2481
|
+
stmt = self._get_traces_base_query(table, spans_table)
|
|
2482
|
+
|
|
2483
|
+
if trace_id:
|
|
2484
|
+
stmt = stmt.where(table.c.trace_id == trace_id)
|
|
2485
|
+
elif run_id:
|
|
2486
|
+
stmt = stmt.where(table.c.run_id == run_id)
|
|
2102
2487
|
else:
|
|
2103
|
-
log_debug(
|
|
2488
|
+
log_debug("get_trace called without any filter parameters")
|
|
2489
|
+
return None
|
|
2490
|
+
|
|
2491
|
+
# Order by most recent and get first result
|
|
2492
|
+
stmt = stmt.order_by(table.c.start_time.desc()).limit(1)
|
|
2493
|
+
result = sess.execute(stmt).fetchone()
|
|
2494
|
+
|
|
2495
|
+
if result:
|
|
2496
|
+
return Trace.from_dict(dict(result._mapping))
|
|
2497
|
+
return None
|
|
2104
2498
|
|
|
2105
2499
|
except Exception as e:
|
|
2106
|
-
log_error(f"Error
|
|
2107
|
-
|
|
2500
|
+
log_error(f"Error getting trace: {e}")
|
|
2501
|
+
return None
|
|
2108
2502
|
|
|
2109
|
-
def
|
|
2110
|
-
self,
|
|
2111
|
-
|
|
2112
|
-
|
|
2503
|
+
def get_traces(
|
|
2504
|
+
self,
|
|
2505
|
+
run_id: Optional[str] = None,
|
|
2506
|
+
session_id: Optional[str] = None,
|
|
2507
|
+
user_id: Optional[str] = None,
|
|
2508
|
+
agent_id: Optional[str] = None,
|
|
2509
|
+
team_id: Optional[str] = None,
|
|
2510
|
+
workflow_id: Optional[str] = None,
|
|
2511
|
+
status: Optional[str] = None,
|
|
2512
|
+
start_time: Optional[datetime] = None,
|
|
2513
|
+
end_time: Optional[datetime] = None,
|
|
2514
|
+
limit: Optional[int] = 20,
|
|
2515
|
+
page: Optional[int] = 1,
|
|
2516
|
+
) -> tuple[List, int]:
|
|
2517
|
+
"""Get traces matching the provided filters with pagination.
|
|
2113
2518
|
|
|
2114
2519
|
Args:
|
|
2115
|
-
|
|
2116
|
-
|
|
2520
|
+
run_id: Filter by run ID.
|
|
2521
|
+
session_id: Filter by session ID.
|
|
2522
|
+
user_id: Filter by user ID.
|
|
2523
|
+
agent_id: Filter by agent ID.
|
|
2524
|
+
team_id: Filter by team ID.
|
|
2525
|
+
workflow_id: Filter by workflow ID.
|
|
2526
|
+
status: Filter by status (OK, ERROR, UNSET).
|
|
2527
|
+
start_time: Filter traces starting after this datetime.
|
|
2528
|
+
end_time: Filter traces ending before this datetime.
|
|
2529
|
+
limit: Maximum number of traces to return per page.
|
|
2530
|
+
page: Page number (1-indexed).
|
|
2117
2531
|
|
|
2118
2532
|
Returns:
|
|
2119
|
-
|
|
2120
|
-
|
|
2121
|
-
Raises:
|
|
2122
|
-
Exception: If an error occurs during retrieval.
|
|
2533
|
+
tuple[List[Trace], int]: Tuple of (list of matching traces, total count).
|
|
2123
2534
|
"""
|
|
2124
2535
|
try:
|
|
2125
|
-
|
|
2126
|
-
if table is None:
|
|
2127
|
-
return None
|
|
2536
|
+
from sqlalchemy import func
|
|
2128
2537
|
|
|
2129
|
-
|
|
2130
|
-
stmt = select(table).where(table.c.id == id)
|
|
2131
|
-
result = sess.execute(stmt).fetchone()
|
|
2132
|
-
if result is None:
|
|
2133
|
-
return None
|
|
2538
|
+
from agno.tracing.schemas import Trace
|
|
2134
2539
|
|
|
2135
|
-
|
|
2136
|
-
|
|
2137
|
-
|
|
2540
|
+
log_debug(
|
|
2541
|
+
f"get_traces called with filters: run_id={run_id}, session_id={session_id}, user_id={user_id}, agent_id={agent_id}, page={page}, limit={limit}"
|
|
2542
|
+
)
|
|
2138
2543
|
|
|
2139
|
-
|
|
2544
|
+
table = self._get_table(table_type="traces")
|
|
2545
|
+
if table is None:
|
|
2546
|
+
log_debug(" Traces table not found")
|
|
2547
|
+
return [], 0
|
|
2548
|
+
|
|
2549
|
+
# Get spans table for JOIN
|
|
2550
|
+
spans_table = self._get_table(table_type="spans")
|
|
2551
|
+
|
|
2552
|
+
with self.Session() as sess:
|
|
2553
|
+
# Build base query with aggregated span counts
|
|
2554
|
+
base_stmt = self._get_traces_base_query(table, spans_table)
|
|
2555
|
+
|
|
2556
|
+
# Apply filters
|
|
2557
|
+
if run_id:
|
|
2558
|
+
base_stmt = base_stmt.where(table.c.run_id == run_id)
|
|
2559
|
+
if session_id:
|
|
2560
|
+
base_stmt = base_stmt.where(table.c.session_id == session_id)
|
|
2561
|
+
if user_id:
|
|
2562
|
+
base_stmt = base_stmt.where(table.c.user_id == user_id)
|
|
2563
|
+
if agent_id:
|
|
2564
|
+
base_stmt = base_stmt.where(table.c.agent_id == agent_id)
|
|
2565
|
+
if team_id:
|
|
2566
|
+
base_stmt = base_stmt.where(table.c.team_id == team_id)
|
|
2567
|
+
if workflow_id:
|
|
2568
|
+
base_stmt = base_stmt.where(table.c.workflow_id == workflow_id)
|
|
2569
|
+
if status:
|
|
2570
|
+
base_stmt = base_stmt.where(table.c.status == status)
|
|
2571
|
+
if start_time:
|
|
2572
|
+
# Convert datetime to ISO string for comparison
|
|
2573
|
+
base_stmt = base_stmt.where(table.c.start_time >= start_time.isoformat())
|
|
2574
|
+
if end_time:
|
|
2575
|
+
# Convert datetime to ISO string for comparison
|
|
2576
|
+
base_stmt = base_stmt.where(table.c.end_time <= end_time.isoformat())
|
|
2577
|
+
|
|
2578
|
+
# Get total count
|
|
2579
|
+
count_stmt = select(func.count()).select_from(base_stmt.alias())
|
|
2580
|
+
total_count = sess.execute(count_stmt).scalar() or 0
|
|
2581
|
+
|
|
2582
|
+
# Apply pagination
|
|
2583
|
+
offset = (page - 1) * limit if page and limit else 0
|
|
2584
|
+
paginated_stmt = base_stmt.order_by(table.c.start_time.desc()).limit(limit).offset(offset)
|
|
2585
|
+
|
|
2586
|
+
results = sess.execute(paginated_stmt).fetchall()
|
|
2587
|
+
|
|
2588
|
+
traces = [Trace.from_dict(dict(row._mapping)) for row in results]
|
|
2589
|
+
return traces, total_count
|
|
2140
2590
|
|
|
2141
2591
|
except Exception as e:
|
|
2142
|
-
log_error(f"
|
|
2143
|
-
|
|
2592
|
+
log_error(f"Error getting traces: {e}")
|
|
2593
|
+
return [], 0
|
|
2144
2594
|
|
|
2145
|
-
def
|
|
2595
|
+
def get_trace_stats(
|
|
2146
2596
|
self,
|
|
2147
|
-
|
|
2597
|
+
user_id: Optional[str] = None,
|
|
2148
2598
|
agent_id: Optional[str] = None,
|
|
2149
2599
|
team_id: Optional[str] = None,
|
|
2150
|
-
|
|
2151
|
-
|
|
2152
|
-
|
|
2153
|
-
|
|
2154
|
-
|
|
2155
|
-
) ->
|
|
2156
|
-
"""Get
|
|
2600
|
+
workflow_id: Optional[str] = None,
|
|
2601
|
+
start_time: Optional[datetime] = None,
|
|
2602
|
+
end_time: Optional[datetime] = None,
|
|
2603
|
+
limit: Optional[int] = 20,
|
|
2604
|
+
page: Optional[int] = 1,
|
|
2605
|
+
) -> tuple[List[Dict[str, Any]], int]:
|
|
2606
|
+
"""Get trace statistics grouped by session.
|
|
2157
2607
|
|
|
2158
2608
|
Args:
|
|
2159
|
-
|
|
2160
|
-
agent_id
|
|
2161
|
-
team_id
|
|
2162
|
-
|
|
2163
|
-
|
|
2164
|
-
|
|
2165
|
-
|
|
2166
|
-
|
|
2609
|
+
user_id: Filter by user ID.
|
|
2610
|
+
agent_id: Filter by agent ID.
|
|
2611
|
+
team_id: Filter by team ID.
|
|
2612
|
+
workflow_id: Filter by workflow ID.
|
|
2613
|
+
start_time: Filter sessions with traces created after this datetime.
|
|
2614
|
+
end_time: Filter sessions with traces created before this datetime.
|
|
2615
|
+
limit: Maximum number of sessions to return per page.
|
|
2616
|
+
page: Page number (1-indexed).
|
|
2167
2617
|
|
|
2168
2618
|
Returns:
|
|
2169
|
-
|
|
2170
|
-
- When deserialize=True: List of CulturalNotion objects
|
|
2171
|
-
- When deserialize=False: List of CulturalNotion dictionaries and total count
|
|
2172
|
-
|
|
2173
|
-
Raises:
|
|
2174
|
-
Exception: If an error occurs during retrieval.
|
|
2619
|
+
tuple[List[Dict], int]: Tuple of (list of session stats dicts, total count).
|
|
2175
2620
|
"""
|
|
2176
2621
|
try:
|
|
2177
|
-
|
|
2622
|
+
from sqlalchemy import func
|
|
2623
|
+
|
|
2624
|
+
table = self._get_table(table_type="traces")
|
|
2178
2625
|
if table is None:
|
|
2179
|
-
|
|
2626
|
+
log_debug("Traces table not found")
|
|
2627
|
+
return [], 0
|
|
2180
2628
|
|
|
2181
|
-
with self.Session() as sess
|
|
2629
|
+
with self.Session() as sess:
|
|
2630
|
+
# Build base query grouped by session_id
|
|
2631
|
+
base_stmt = (
|
|
2632
|
+
select(
|
|
2633
|
+
table.c.session_id,
|
|
2634
|
+
table.c.user_id,
|
|
2635
|
+
table.c.agent_id,
|
|
2636
|
+
table.c.team_id,
|
|
2637
|
+
table.c.workflow_id,
|
|
2638
|
+
func.count(table.c.trace_id).label("total_traces"),
|
|
2639
|
+
func.min(table.c.created_at).label("first_trace_at"),
|
|
2640
|
+
func.max(table.c.created_at).label("last_trace_at"),
|
|
2641
|
+
)
|
|
2642
|
+
.where(table.c.session_id.isnot(None)) # Only sessions with session_id
|
|
2643
|
+
.group_by(
|
|
2644
|
+
table.c.session_id, table.c.user_id, table.c.agent_id, table.c.team_id, table.c.workflow_id
|
|
2645
|
+
)
|
|
2646
|
+
)
|
|
2647
|
+
|
|
2648
|
+
# Apply filters
|
|
2649
|
+
if user_id:
|
|
2650
|
+
base_stmt = base_stmt.where(table.c.user_id == user_id)
|
|
2651
|
+
if workflow_id:
|
|
2652
|
+
base_stmt = base_stmt.where(table.c.workflow_id == workflow_id)
|
|
2653
|
+
if team_id:
|
|
2654
|
+
base_stmt = base_stmt.where(table.c.team_id == team_id)
|
|
2655
|
+
if agent_id:
|
|
2656
|
+
base_stmt = base_stmt.where(table.c.agent_id == agent_id)
|
|
2657
|
+
if start_time:
|
|
2658
|
+
# Convert datetime to ISO string for comparison
|
|
2659
|
+
base_stmt = base_stmt.where(table.c.created_at >= start_time.isoformat())
|
|
2660
|
+
if end_time:
|
|
2661
|
+
# Convert datetime to ISO string for comparison
|
|
2662
|
+
base_stmt = base_stmt.where(table.c.created_at <= end_time.isoformat())
|
|
2663
|
+
|
|
2664
|
+
# Get total count of sessions
|
|
2665
|
+
count_stmt = select(func.count()).select_from(base_stmt.alias())
|
|
2666
|
+
total_count = sess.execute(count_stmt).scalar() or 0
|
|
2667
|
+
|
|
2668
|
+
# Apply pagination and ordering
|
|
2669
|
+
offset = (page - 1) * limit if page and limit else 0
|
|
2670
|
+
paginated_stmt = base_stmt.order_by(func.max(table.c.created_at).desc()).limit(limit).offset(offset)
|
|
2671
|
+
|
|
2672
|
+
results = sess.execute(paginated_stmt).fetchall()
|
|
2673
|
+
|
|
2674
|
+
# Convert to list of dicts with datetime objects
|
|
2675
|
+
from datetime import datetime
|
|
2676
|
+
|
|
2677
|
+
stats_list = []
|
|
2678
|
+
for row in results:
|
|
2679
|
+
# Convert ISO strings to datetime objects
|
|
2680
|
+
first_trace_at_str = row.first_trace_at
|
|
2681
|
+
last_trace_at_str = row.last_trace_at
|
|
2682
|
+
|
|
2683
|
+
# Parse ISO format strings to datetime objects
|
|
2684
|
+
first_trace_at = datetime.fromisoformat(first_trace_at_str.replace("Z", "+00:00"))
|
|
2685
|
+
last_trace_at = datetime.fromisoformat(last_trace_at_str.replace("Z", "+00:00"))
|
|
2686
|
+
|
|
2687
|
+
stats_list.append(
|
|
2688
|
+
{
|
|
2689
|
+
"session_id": row.session_id,
|
|
2690
|
+
"user_id": row.user_id,
|
|
2691
|
+
"agent_id": row.agent_id,
|
|
2692
|
+
"team_id": row.team_id,
|
|
2693
|
+
"workflow_id": row.workflow_id,
|
|
2694
|
+
"total_traces": row.total_traces,
|
|
2695
|
+
"first_trace_at": first_trace_at,
|
|
2696
|
+
"last_trace_at": last_trace_at,
|
|
2697
|
+
}
|
|
2698
|
+
)
|
|
2699
|
+
|
|
2700
|
+
return stats_list, total_count
|
|
2701
|
+
|
|
2702
|
+
except Exception as e:
|
|
2703
|
+
log_error(f"Error getting trace stats: {e}")
|
|
2704
|
+
return [], 0
|
|
2705
|
+
|
|
2706
|
+
# -- Span methods --
|
|
2707
|
+
|
|
2708
|
+
def create_span(self, span: "Span") -> None:
|
|
2709
|
+
"""Create a single span in the database.
|
|
2710
|
+
|
|
2711
|
+
Args:
|
|
2712
|
+
span: The Span object to store.
|
|
2713
|
+
"""
|
|
2714
|
+
try:
|
|
2715
|
+
table = self._get_table(table_type="spans", create_table_if_not_found=True)
|
|
2716
|
+
if table is None:
|
|
2717
|
+
return
|
|
2718
|
+
|
|
2719
|
+
with self.Session() as sess, sess.begin():
|
|
2720
|
+
stmt = sqlite.insert(table).values(span.to_dict())
|
|
2721
|
+
sess.execute(stmt)
|
|
2722
|
+
|
|
2723
|
+
except Exception as e:
|
|
2724
|
+
log_error(f"Error creating span: {e}")
|
|
2725
|
+
|
|
2726
|
+
def create_spans(self, spans: List) -> None:
|
|
2727
|
+
"""Create multiple spans in the database as a batch.
|
|
2728
|
+
|
|
2729
|
+
Args:
|
|
2730
|
+
spans: List of Span objects to store.
|
|
2731
|
+
"""
|
|
2732
|
+
if not spans:
|
|
2733
|
+
return
|
|
2734
|
+
|
|
2735
|
+
try:
|
|
2736
|
+
table = self._get_table(table_type="spans", create_table_if_not_found=True)
|
|
2737
|
+
if table is None:
|
|
2738
|
+
return
|
|
2739
|
+
|
|
2740
|
+
with self.Session() as sess, sess.begin():
|
|
2741
|
+
for span in spans:
|
|
2742
|
+
stmt = sqlite.insert(table).values(span.to_dict())
|
|
2743
|
+
sess.execute(stmt)
|
|
2744
|
+
|
|
2745
|
+
except Exception as e:
|
|
2746
|
+
log_error(f"Error creating spans batch: {e}")
|
|
2747
|
+
|
|
2748
|
+
def get_span(self, span_id: str):
|
|
2749
|
+
"""Get a single span by its span_id.
|
|
2750
|
+
|
|
2751
|
+
Args:
|
|
2752
|
+
span_id: The unique span identifier.
|
|
2753
|
+
|
|
2754
|
+
Returns:
|
|
2755
|
+
Optional[Span]: The span if found, None otherwise.
|
|
2756
|
+
"""
|
|
2757
|
+
try:
|
|
2758
|
+
from agno.tracing.schemas import Span
|
|
2759
|
+
|
|
2760
|
+
table = self._get_table(table_type="spans")
|
|
2761
|
+
if table is None:
|
|
2762
|
+
return None
|
|
2763
|
+
|
|
2764
|
+
with self.Session() as sess:
|
|
2765
|
+
stmt = table.select().where(table.c.span_id == span_id)
|
|
2766
|
+
result = sess.execute(stmt).fetchone()
|
|
2767
|
+
if result:
|
|
2768
|
+
return Span.from_dict(dict(result._mapping))
|
|
2769
|
+
return None
|
|
2770
|
+
|
|
2771
|
+
except Exception as e:
|
|
2772
|
+
log_error(f"Error getting span: {e}")
|
|
2773
|
+
return None
|
|
2774
|
+
|
|
2775
|
+
def get_spans(
|
|
2776
|
+
self,
|
|
2777
|
+
trace_id: Optional[str] = None,
|
|
2778
|
+
parent_span_id: Optional[str] = None,
|
|
2779
|
+
) -> List:
|
|
2780
|
+
"""Get spans matching the provided filters.
|
|
2781
|
+
|
|
2782
|
+
Args:
|
|
2783
|
+
trace_id: Filter by trace ID.
|
|
2784
|
+
parent_span_id: Filter by parent span ID.
|
|
2785
|
+
|
|
2786
|
+
Returns:
|
|
2787
|
+
List[Span]: List of matching spans.
|
|
2788
|
+
"""
|
|
2789
|
+
try:
|
|
2790
|
+
from agno.tracing.schemas import Span
|
|
2791
|
+
|
|
2792
|
+
table = self._get_table(table_type="spans")
|
|
2793
|
+
if table is None:
|
|
2794
|
+
return []
|
|
2795
|
+
|
|
2796
|
+
with self.Session() as sess:
|
|
2797
|
+
stmt = table.select()
|
|
2798
|
+
|
|
2799
|
+
# Apply filters
|
|
2800
|
+
if trace_id:
|
|
2801
|
+
stmt = stmt.where(table.c.trace_id == trace_id)
|
|
2802
|
+
if parent_span_id:
|
|
2803
|
+
stmt = stmt.where(table.c.parent_span_id == parent_span_id)
|
|
2804
|
+
|
|
2805
|
+
results = sess.execute(stmt).fetchall()
|
|
2806
|
+
return [Span.from_dict(dict(row._mapping)) for row in results]
|
|
2807
|
+
|
|
2808
|
+
except Exception as e:
|
|
2809
|
+
log_error(f"Error getting spans: {e}")
|
|
2810
|
+
return []
|
|
2811
|
+
|
|
2812
|
+
# -- Migrations --
|
|
2813
|
+
|
|
2814
|
+
def migrate_table_from_v1_to_v2(self, v1_db_schema: str, v1_table_name: str, v1_table_type: str):
|
|
2815
|
+
"""Migrate all content in the given table to the right v2 table"""
|
|
2816
|
+
|
|
2817
|
+
from agno.db.migrations.v1_to_v2 import (
|
|
2818
|
+
get_all_table_content,
|
|
2819
|
+
parse_agent_sessions,
|
|
2820
|
+
parse_memories,
|
|
2821
|
+
parse_team_sessions,
|
|
2822
|
+
parse_workflow_sessions,
|
|
2823
|
+
)
|
|
2824
|
+
|
|
2825
|
+
# Get all content from the old table
|
|
2826
|
+
old_content: list[dict[str, Any]] = get_all_table_content(
|
|
2827
|
+
db=self,
|
|
2828
|
+
db_schema=v1_db_schema,
|
|
2829
|
+
table_name=v1_table_name,
|
|
2830
|
+
)
|
|
2831
|
+
if not old_content:
|
|
2832
|
+
log_info(f"No content to migrate from table {v1_table_name}")
|
|
2833
|
+
return
|
|
2834
|
+
|
|
2835
|
+
# Parse the content into the new format
|
|
2836
|
+
memories: List[UserMemory] = []
|
|
2837
|
+
sessions: Sequence[Union[AgentSession, TeamSession, WorkflowSession]] = []
|
|
2838
|
+
if v1_table_type == "agent_sessions":
|
|
2839
|
+
sessions = parse_agent_sessions(old_content)
|
|
2840
|
+
elif v1_table_type == "team_sessions":
|
|
2841
|
+
sessions = parse_team_sessions(old_content)
|
|
2842
|
+
elif v1_table_type == "workflow_sessions":
|
|
2843
|
+
sessions = parse_workflow_sessions(old_content)
|
|
2844
|
+
elif v1_table_type == "memories":
|
|
2845
|
+
memories = parse_memories(old_content)
|
|
2846
|
+
else:
|
|
2847
|
+
raise ValueError(f"Invalid table type: {v1_table_type}")
|
|
2848
|
+
|
|
2849
|
+
# Insert the new content into the new table
|
|
2850
|
+
if v1_table_type == "agent_sessions":
|
|
2851
|
+
for session in sessions:
|
|
2852
|
+
self.upsert_session(session)
|
|
2853
|
+
log_info(f"Migrated {len(sessions)} Agent sessions to table: {self.session_table_name}")
|
|
2854
|
+
|
|
2855
|
+
elif v1_table_type == "team_sessions":
|
|
2856
|
+
for session in sessions:
|
|
2857
|
+
self.upsert_session(session)
|
|
2858
|
+
log_info(f"Migrated {len(sessions)} Team sessions to table: {self.session_table_name}")
|
|
2859
|
+
|
|
2860
|
+
elif v1_table_type == "workflow_sessions":
|
|
2861
|
+
for session in sessions:
|
|
2862
|
+
self.upsert_session(session)
|
|
2863
|
+
log_info(f"Migrated {len(sessions)} Workflow sessions to table: {self.session_table_name}")
|
|
2864
|
+
|
|
2865
|
+
elif v1_table_type == "memories":
|
|
2866
|
+
for memory in memories:
|
|
2867
|
+
self.upsert_user_memory(memory)
|
|
2868
|
+
log_info(f"Migrated {len(memories)} memories to table: {self.memory_table}")
|
|
2869
|
+
|
|
2870
|
+
# -- Culture methods --
|
|
2871
|
+
|
|
2872
|
+
def clear_cultural_knowledge(self) -> None:
|
|
2873
|
+
"""Delete all cultural artifacts from the database.
|
|
2874
|
+
|
|
2875
|
+
Raises:
|
|
2876
|
+
Exception: If an error occurs during deletion.
|
|
2877
|
+
"""
|
|
2878
|
+
try:
|
|
2879
|
+
table = self._get_table(table_type="culture")
|
|
2880
|
+
if table is None:
|
|
2881
|
+
return
|
|
2882
|
+
|
|
2883
|
+
with self.Session() as sess, sess.begin():
|
|
2884
|
+
sess.execute(table.delete())
|
|
2885
|
+
|
|
2886
|
+
except Exception as e:
|
|
2887
|
+
from agno.utils.log import log_warning
|
|
2888
|
+
|
|
2889
|
+
log_warning(f"Exception deleting all cultural artifacts: {e}")
|
|
2890
|
+
raise e
|
|
2891
|
+
|
|
2892
|
+
def delete_cultural_knowledge(self, id: str) -> None:
|
|
2893
|
+
"""Delete a cultural artifact from the database.
|
|
2894
|
+
|
|
2895
|
+
Args:
|
|
2896
|
+
id (str): The ID of the cultural artifact to delete.
|
|
2897
|
+
|
|
2898
|
+
Raises:
|
|
2899
|
+
Exception: If an error occurs during deletion.
|
|
2900
|
+
"""
|
|
2901
|
+
try:
|
|
2902
|
+
table = self._get_table(table_type="culture")
|
|
2903
|
+
if table is None:
|
|
2904
|
+
return
|
|
2905
|
+
|
|
2906
|
+
with self.Session() as sess, sess.begin():
|
|
2907
|
+
delete_stmt = table.delete().where(table.c.id == id)
|
|
2908
|
+
result = sess.execute(delete_stmt)
|
|
2909
|
+
|
|
2910
|
+
success = result.rowcount > 0
|
|
2911
|
+
if success:
|
|
2912
|
+
log_debug(f"Successfully deleted cultural artifact id: {id}")
|
|
2913
|
+
else:
|
|
2914
|
+
log_debug(f"No cultural artifact found with id: {id}")
|
|
2915
|
+
|
|
2916
|
+
except Exception as e:
|
|
2917
|
+
log_error(f"Error deleting cultural artifact: {e}")
|
|
2918
|
+
raise e
|
|
2919
|
+
|
|
2920
|
+
def get_cultural_knowledge(
|
|
2921
|
+
self, id: str, deserialize: Optional[bool] = True
|
|
2922
|
+
) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
|
|
2923
|
+
"""Get a cultural artifact from the database.
|
|
2924
|
+
|
|
2925
|
+
Args:
|
|
2926
|
+
id (str): The ID of the cultural artifact to get.
|
|
2927
|
+
deserialize (Optional[bool]): Whether to serialize the cultural artifact. Defaults to True.
|
|
2928
|
+
|
|
2929
|
+
Returns:
|
|
2930
|
+
Optional[CulturalKnowledge]: The cultural artifact, or None if it doesn't exist.
|
|
2931
|
+
|
|
2932
|
+
Raises:
|
|
2933
|
+
Exception: If an error occurs during retrieval.
|
|
2934
|
+
"""
|
|
2935
|
+
try:
|
|
2936
|
+
table = self._get_table(table_type="culture")
|
|
2937
|
+
if table is None:
|
|
2938
|
+
return None
|
|
2939
|
+
|
|
2940
|
+
with self.Session() as sess, sess.begin():
|
|
2941
|
+
stmt = select(table).where(table.c.id == id)
|
|
2942
|
+
result = sess.execute(stmt).fetchone()
|
|
2943
|
+
if result is None:
|
|
2944
|
+
return None
|
|
2945
|
+
|
|
2946
|
+
db_row = dict(result._mapping)
|
|
2947
|
+
if not db_row or not deserialize:
|
|
2948
|
+
return db_row
|
|
2949
|
+
|
|
2950
|
+
return deserialize_cultural_knowledge_from_db(db_row)
|
|
2951
|
+
|
|
2952
|
+
except Exception as e:
|
|
2953
|
+
log_error(f"Exception reading from cultural artifacts table: {e}")
|
|
2954
|
+
raise e
|
|
2955
|
+
|
|
2956
|
+
def get_all_cultural_knowledge(
|
|
2957
|
+
self,
|
|
2958
|
+
name: Optional[str] = None,
|
|
2959
|
+
agent_id: Optional[str] = None,
|
|
2960
|
+
team_id: Optional[str] = None,
|
|
2961
|
+
limit: Optional[int] = None,
|
|
2962
|
+
page: Optional[int] = None,
|
|
2963
|
+
sort_by: Optional[str] = None,
|
|
2964
|
+
sort_order: Optional[str] = None,
|
|
2965
|
+
deserialize: Optional[bool] = True,
|
|
2966
|
+
) -> Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
|
|
2967
|
+
"""Get all cultural artifacts from the database as CulturalNotion objects.
|
|
2968
|
+
|
|
2969
|
+
Args:
|
|
2970
|
+
name (Optional[str]): The name of the cultural artifact to filter by.
|
|
2971
|
+
agent_id (Optional[str]): The ID of the agent to filter by.
|
|
2972
|
+
team_id (Optional[str]): The ID of the team to filter by.
|
|
2973
|
+
limit (Optional[int]): The maximum number of cultural artifacts to return.
|
|
2974
|
+
page (Optional[int]): The page number.
|
|
2975
|
+
sort_by (Optional[str]): The column to sort by.
|
|
2976
|
+
sort_order (Optional[str]): The order to sort by.
|
|
2977
|
+
deserialize (Optional[bool]): Whether to serialize the cultural artifacts. Defaults to True.
|
|
2978
|
+
|
|
2979
|
+
Returns:
|
|
2980
|
+
Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
|
|
2981
|
+
- When deserialize=True: List of CulturalNotion objects
|
|
2982
|
+
- When deserialize=False: List of CulturalNotion dictionaries and total count
|
|
2983
|
+
|
|
2984
|
+
Raises:
|
|
2985
|
+
Exception: If an error occurs during retrieval.
|
|
2986
|
+
"""
|
|
2987
|
+
try:
|
|
2988
|
+
table = self._get_table(table_type="culture")
|
|
2989
|
+
if table is None:
|
|
2990
|
+
return [] if deserialize else ([], 0)
|
|
2991
|
+
|
|
2992
|
+
with self.Session() as sess, sess.begin():
|
|
2182
2993
|
stmt = select(table)
|
|
2183
2994
|
|
|
2184
2995
|
# Filtering
|
|
@@ -2286,3 +3097,1230 @@ class SqliteDb(BaseDb):
|
|
|
2286
3097
|
except Exception as e:
|
|
2287
3098
|
log_error(f"Error upserting cultural knowledge: {e}")
|
|
2288
3099
|
raise e
|
|
3100
|
+
|
|
3101
|
+
# --- Components ---
|
|
3102
|
+
def get_component(
|
|
3103
|
+
self,
|
|
3104
|
+
component_id: str,
|
|
3105
|
+
component_type: Optional[ComponentType] = None,
|
|
3106
|
+
) -> Optional[Dict[str, Any]]:
|
|
3107
|
+
"""Get a component by ID.
|
|
3108
|
+
|
|
3109
|
+
Args:
|
|
3110
|
+
component_id: The component ID.
|
|
3111
|
+
component_type: Optional type filter (agent|team|workflow).
|
|
3112
|
+
|
|
3113
|
+
Returns:
|
|
3114
|
+
Component dictionary or None if not found.
|
|
3115
|
+
"""
|
|
3116
|
+
try:
|
|
3117
|
+
table = self._get_table(table_type="components")
|
|
3118
|
+
if table is None:
|
|
3119
|
+
return None
|
|
3120
|
+
|
|
3121
|
+
with self.Session() as sess:
|
|
3122
|
+
stmt = select(table).where(
|
|
3123
|
+
table.c.component_id == component_id,
|
|
3124
|
+
table.c.deleted_at.is_(None),
|
|
3125
|
+
)
|
|
3126
|
+
if component_type is not None:
|
|
3127
|
+
stmt = stmt.where(table.c.component_type == component_type.value)
|
|
3128
|
+
|
|
3129
|
+
result = sess.execute(stmt).fetchone()
|
|
3130
|
+
return dict(result._mapping) if result else None
|
|
3131
|
+
|
|
3132
|
+
except Exception as e:
|
|
3133
|
+
log_error(f"Error getting component: {e}")
|
|
3134
|
+
raise
|
|
3135
|
+
|
|
3136
|
+
def upsert_component(
|
|
3137
|
+
self,
|
|
3138
|
+
component_id: str,
|
|
3139
|
+
component_type: Optional[ComponentType] = None,
|
|
3140
|
+
name: Optional[str] = None,
|
|
3141
|
+
description: Optional[str] = None,
|
|
3142
|
+
metadata: Optional[Dict[str, Any]] = None,
|
|
3143
|
+
) -> Dict[str, Any]:
|
|
3144
|
+
"""Create or update a component.
|
|
3145
|
+
|
|
3146
|
+
Args:
|
|
3147
|
+
component_id: Unique identifier.
|
|
3148
|
+
component_type: Type (agent|team|workflow). Required for create, optional for update.
|
|
3149
|
+
name: Display name.
|
|
3150
|
+
description: Optional description.
|
|
3151
|
+
metadata: Optional metadata dict.
|
|
3152
|
+
|
|
3153
|
+
Returns:
|
|
3154
|
+
Created/updated component dictionary.
|
|
3155
|
+
|
|
3156
|
+
Raises:
|
|
3157
|
+
ValueError: If creating and component_type is not provided.
|
|
3158
|
+
"""
|
|
3159
|
+
try:
|
|
3160
|
+
table = self._get_table(table_type="components", create_table_if_not_found=True)
|
|
3161
|
+
if table is None:
|
|
3162
|
+
raise ValueError("Components table not found")
|
|
3163
|
+
|
|
3164
|
+
with self.Session() as sess, sess.begin():
|
|
3165
|
+
existing = sess.execute(select(table).where(table.c.component_id == component_id)).fetchone()
|
|
3166
|
+
|
|
3167
|
+
if existing is None:
|
|
3168
|
+
# Create new component
|
|
3169
|
+
if component_type is None:
|
|
3170
|
+
raise ValueError("component_type is required when creating a new component")
|
|
3171
|
+
|
|
3172
|
+
sess.execute(
|
|
3173
|
+
table.insert().values(
|
|
3174
|
+
component_id=component_id,
|
|
3175
|
+
component_type=component_type.value if hasattr(component_type, "value") else component_type,
|
|
3176
|
+
name=name or component_id,
|
|
3177
|
+
description=description,
|
|
3178
|
+
current_version=None,
|
|
3179
|
+
metadata=metadata,
|
|
3180
|
+
created_at=int(time.time()),
|
|
3181
|
+
)
|
|
3182
|
+
)
|
|
3183
|
+
log_debug(f"Created component {component_id}")
|
|
3184
|
+
|
|
3185
|
+
elif existing.deleted_at is not None:
|
|
3186
|
+
# Reactivate soft-deleted
|
|
3187
|
+
if component_type is None:
|
|
3188
|
+
raise ValueError("component_type is required when reactivating a deleted component")
|
|
3189
|
+
|
|
3190
|
+
sess.execute(
|
|
3191
|
+
table.update()
|
|
3192
|
+
.where(table.c.component_id == component_id)
|
|
3193
|
+
.values(
|
|
3194
|
+
component_type=component_type.value if hasattr(component_type, "value") else component_type,
|
|
3195
|
+
name=name or component_id,
|
|
3196
|
+
description=description,
|
|
3197
|
+
current_version=None,
|
|
3198
|
+
metadata=metadata,
|
|
3199
|
+
updated_at=int(time.time()),
|
|
3200
|
+
deleted_at=None,
|
|
3201
|
+
)
|
|
3202
|
+
)
|
|
3203
|
+
log_debug(f"Reactivated component {component_id}")
|
|
3204
|
+
|
|
3205
|
+
else:
|
|
3206
|
+
# Update existing
|
|
3207
|
+
updates: Dict[str, Any] = {"updated_at": int(time.time())}
|
|
3208
|
+
if component_type is not None:
|
|
3209
|
+
updates["component_type"] = (
|
|
3210
|
+
component_type.value if hasattr(component_type, "value") else component_type
|
|
3211
|
+
)
|
|
3212
|
+
if name is not None:
|
|
3213
|
+
updates["name"] = name
|
|
3214
|
+
if description is not None:
|
|
3215
|
+
updates["description"] = description
|
|
3216
|
+
if metadata is not None:
|
|
3217
|
+
updates["metadata"] = metadata
|
|
3218
|
+
|
|
3219
|
+
sess.execute(table.update().where(table.c.component_id == component_id).values(**updates))
|
|
3220
|
+
log_debug(f"Updated component {component_id}")
|
|
3221
|
+
|
|
3222
|
+
result = self.get_component(component_id)
|
|
3223
|
+
if result is None:
|
|
3224
|
+
raise ValueError(f"Failed to get component {component_id} after upsert")
|
|
3225
|
+
return result
|
|
3226
|
+
|
|
3227
|
+
except Exception as e:
|
|
3228
|
+
log_error(f"Error upserting component: {e}")
|
|
3229
|
+
raise
|
|
3230
|
+
|
|
3231
|
+
def delete_component(
|
|
3232
|
+
self,
|
|
3233
|
+
component_id: str,
|
|
3234
|
+
hard_delete: bool = False,
|
|
3235
|
+
) -> bool:
|
|
3236
|
+
"""Delete a component and all its configs/links.
|
|
3237
|
+
|
|
3238
|
+
Args:
|
|
3239
|
+
component_id: The component ID.
|
|
3240
|
+
hard_delete: If True, permanently delete. Otherwise soft-delete.
|
|
3241
|
+
|
|
3242
|
+
Returns:
|
|
3243
|
+
True if deleted, False if not found.
|
|
3244
|
+
"""
|
|
3245
|
+
try:
|
|
3246
|
+
components_table = self._get_table(table_type="components")
|
|
3247
|
+
configs_table = self._get_table(table_type="component_configs")
|
|
3248
|
+
links_table = self._get_table(table_type="component_links")
|
|
3249
|
+
|
|
3250
|
+
if components_table is None:
|
|
3251
|
+
return False
|
|
3252
|
+
|
|
3253
|
+
with self.Session() as sess, sess.begin():
|
|
3254
|
+
if hard_delete:
|
|
3255
|
+
# Delete links where this component is parent or child
|
|
3256
|
+
if links_table is not None:
|
|
3257
|
+
sess.execute(links_table.delete().where(links_table.c.parent_component_id == component_id))
|
|
3258
|
+
sess.execute(links_table.delete().where(links_table.c.child_component_id == component_id))
|
|
3259
|
+
# Delete configs
|
|
3260
|
+
if configs_table is not None:
|
|
3261
|
+
sess.execute(configs_table.delete().where(configs_table.c.component_id == component_id))
|
|
3262
|
+
# Delete component
|
|
3263
|
+
result = sess.execute(
|
|
3264
|
+
components_table.delete().where(components_table.c.component_id == component_id)
|
|
3265
|
+
)
|
|
3266
|
+
else:
|
|
3267
|
+
# Soft delete
|
|
3268
|
+
now = int(time.time())
|
|
3269
|
+
result = sess.execute(
|
|
3270
|
+
components_table.update()
|
|
3271
|
+
.where(components_table.c.component_id == component_id)
|
|
3272
|
+
.values(deleted_at=now)
|
|
3273
|
+
)
|
|
3274
|
+
|
|
3275
|
+
return result.rowcount > 0
|
|
3276
|
+
|
|
3277
|
+
except Exception as e:
|
|
3278
|
+
log_error(f"Error deleting component: {e}")
|
|
3279
|
+
raise
|
|
3280
|
+
|
|
3281
|
+
def list_components(
|
|
3282
|
+
self,
|
|
3283
|
+
component_type: Optional[ComponentType] = None,
|
|
3284
|
+
include_deleted: bool = False,
|
|
3285
|
+
limit: int = 20,
|
|
3286
|
+
offset: int = 0,
|
|
3287
|
+
) -> Tuple[List[Dict[str, Any]], int]:
|
|
3288
|
+
"""List components with pagination.
|
|
3289
|
+
|
|
3290
|
+
Args:
|
|
3291
|
+
component_type: Filter by type (agent|team|workflow).
|
|
3292
|
+
include_deleted: Include soft-deleted components.
|
|
3293
|
+
limit: Maximum number of items to return.
|
|
3294
|
+
offset: Number of items to skip.
|
|
3295
|
+
|
|
3296
|
+
Returns:
|
|
3297
|
+
Tuple of (list of component dicts, total count).
|
|
3298
|
+
"""
|
|
3299
|
+
try:
|
|
3300
|
+
table = self._get_table(table_type="components")
|
|
3301
|
+
if table is None:
|
|
3302
|
+
return [], 0
|
|
3303
|
+
|
|
3304
|
+
with self.Session() as sess:
|
|
3305
|
+
# Build base where clause
|
|
3306
|
+
where_clauses = []
|
|
3307
|
+
if component_type is not None:
|
|
3308
|
+
where_clauses.append(table.c.component_type == component_type.value)
|
|
3309
|
+
if not include_deleted:
|
|
3310
|
+
where_clauses.append(table.c.deleted_at.is_(None))
|
|
3311
|
+
|
|
3312
|
+
# Get total count
|
|
3313
|
+
count_stmt = select(func.count()).select_from(table)
|
|
3314
|
+
for clause in where_clauses:
|
|
3315
|
+
count_stmt = count_stmt.where(clause)
|
|
3316
|
+
total_count = sess.execute(count_stmt).scalar() or 0
|
|
3317
|
+
|
|
3318
|
+
# Get paginated results
|
|
3319
|
+
stmt = select(table).order_by(
|
|
3320
|
+
table.c.created_at.desc(),
|
|
3321
|
+
table.c.component_id,
|
|
3322
|
+
)
|
|
3323
|
+
for clause in where_clauses:
|
|
3324
|
+
stmt = stmt.where(clause)
|
|
3325
|
+
stmt = stmt.limit(limit).offset(offset)
|
|
3326
|
+
|
|
3327
|
+
results = sess.execute(stmt).fetchall()
|
|
3328
|
+
return [dict(row._mapping) for row in results], total_count
|
|
3329
|
+
|
|
3330
|
+
except Exception as e:
|
|
3331
|
+
log_error(f"Error listing components: {e}")
|
|
3332
|
+
raise
|
|
3333
|
+
|
|
3334
|
+
def create_component_with_config(
|
|
3335
|
+
self,
|
|
3336
|
+
component_id: str,
|
|
3337
|
+
component_type: ComponentType,
|
|
3338
|
+
name: Optional[str],
|
|
3339
|
+
config: Dict[str, Any],
|
|
3340
|
+
description: Optional[str] = None,
|
|
3341
|
+
metadata: Optional[Dict[str, Any]] = None,
|
|
3342
|
+
label: Optional[str] = None,
|
|
3343
|
+
stage: str = "draft",
|
|
3344
|
+
notes: Optional[str] = None,
|
|
3345
|
+
links: Optional[List[Dict[str, Any]]] = None,
|
|
3346
|
+
) -> Tuple[Dict[str, Any], Dict[str, Any]]:
|
|
3347
|
+
"""Create a component with its initial config atomically.
|
|
3348
|
+
|
|
3349
|
+
Args:
|
|
3350
|
+
component_id: Unique identifier.
|
|
3351
|
+
component_type: Type (agent|team|workflow).
|
|
3352
|
+
name: Display name.
|
|
3353
|
+
config: The config data.
|
|
3354
|
+
description: Optional description.
|
|
3355
|
+
metadata: Optional metadata dict.
|
|
3356
|
+
label: Optional config label.
|
|
3357
|
+
stage: "draft" or "published".
|
|
3358
|
+
notes: Optional notes.
|
|
3359
|
+
links: Optional list of links. Each must have child_version set.
|
|
3360
|
+
|
|
3361
|
+
Returns:
|
|
3362
|
+
Tuple of (component dict, config dict).
|
|
3363
|
+
|
|
3364
|
+
Raises:
|
|
3365
|
+
ValueError: If component already exists, invalid stage, or link missing child_version.
|
|
3366
|
+
"""
|
|
3367
|
+
if stage not in {"draft", "published"}:
|
|
3368
|
+
raise ValueError(f"Invalid stage: {stage}")
|
|
3369
|
+
|
|
3370
|
+
# Validate links have child_version
|
|
3371
|
+
if links:
|
|
3372
|
+
for link in links:
|
|
3373
|
+
if link.get("child_version") is None:
|
|
3374
|
+
raise ValueError(f"child_version is required for link to {link['child_component_id']}")
|
|
3375
|
+
|
|
3376
|
+
try:
|
|
3377
|
+
components_table = self._get_table(table_type="components", create_table_if_not_found=True)
|
|
3378
|
+
configs_table = self._get_table(table_type="component_configs", create_table_if_not_found=True)
|
|
3379
|
+
links_table = self._get_table(table_type="component_links", create_table_if_not_found=True)
|
|
3380
|
+
|
|
3381
|
+
if components_table is None:
|
|
3382
|
+
raise ValueError("Components table not found")
|
|
3383
|
+
if configs_table is None:
|
|
3384
|
+
raise ValueError("Component configs table not found")
|
|
3385
|
+
|
|
3386
|
+
with self.Session() as sess, sess.begin():
|
|
3387
|
+
# Check if component already exists
|
|
3388
|
+
existing = sess.execute(
|
|
3389
|
+
select(components_table.c.component_id).where(components_table.c.component_id == component_id)
|
|
3390
|
+
).scalar_one_or_none()
|
|
3391
|
+
|
|
3392
|
+
if existing is not None:
|
|
3393
|
+
raise ValueError(f"Component {component_id} already exists")
|
|
3394
|
+
|
|
3395
|
+
# Check label uniqueness
|
|
3396
|
+
if label is not None:
|
|
3397
|
+
existing_label = sess.execute(
|
|
3398
|
+
select(configs_table.c.version).where(
|
|
3399
|
+
configs_table.c.component_id == component_id,
|
|
3400
|
+
configs_table.c.label == label,
|
|
3401
|
+
)
|
|
3402
|
+
).first()
|
|
3403
|
+
if existing_label:
|
|
3404
|
+
raise ValueError(f"Label '{label}' already exists for {component_id}")
|
|
3405
|
+
|
|
3406
|
+
now = int(time.time())
|
|
3407
|
+
version = 1
|
|
3408
|
+
|
|
3409
|
+
# Create component
|
|
3410
|
+
sess.execute(
|
|
3411
|
+
components_table.insert().values(
|
|
3412
|
+
component_id=component_id,
|
|
3413
|
+
component_type=component_type.value,
|
|
3414
|
+
name=name,
|
|
3415
|
+
description=description,
|
|
3416
|
+
metadata=metadata,
|
|
3417
|
+
current_version=version if stage == "published" else None,
|
|
3418
|
+
created_at=now,
|
|
3419
|
+
)
|
|
3420
|
+
)
|
|
3421
|
+
|
|
3422
|
+
# Create initial config
|
|
3423
|
+
sess.execute(
|
|
3424
|
+
configs_table.insert().values(
|
|
3425
|
+
component_id=component_id,
|
|
3426
|
+
version=version,
|
|
3427
|
+
label=label,
|
|
3428
|
+
stage=stage,
|
|
3429
|
+
config=config,
|
|
3430
|
+
notes=notes,
|
|
3431
|
+
created_at=now,
|
|
3432
|
+
)
|
|
3433
|
+
)
|
|
3434
|
+
|
|
3435
|
+
# Create links if provided
|
|
3436
|
+
if links and links_table is not None:
|
|
3437
|
+
for link in links:
|
|
3438
|
+
sess.execute(
|
|
3439
|
+
links_table.insert().values(
|
|
3440
|
+
parent_component_id=component_id,
|
|
3441
|
+
parent_version=version,
|
|
3442
|
+
link_kind=link["link_kind"],
|
|
3443
|
+
link_key=link["link_key"],
|
|
3444
|
+
child_component_id=link["child_component_id"],
|
|
3445
|
+
child_version=link["child_version"],
|
|
3446
|
+
position=link["position"],
|
|
3447
|
+
meta=link.get("meta"),
|
|
3448
|
+
created_at=now,
|
|
3449
|
+
)
|
|
3450
|
+
)
|
|
3451
|
+
|
|
3452
|
+
# Fetch and return both
|
|
3453
|
+
component = self.get_component(component_id)
|
|
3454
|
+
config_result = self.get_config(component_id, version=version)
|
|
3455
|
+
|
|
3456
|
+
if component is None:
|
|
3457
|
+
raise ValueError(f"Failed to get component {component_id} after creation")
|
|
3458
|
+
if config_result is None:
|
|
3459
|
+
raise ValueError(f"Failed to get config for {component_id} after creation")
|
|
3460
|
+
|
|
3461
|
+
return component, config_result
|
|
3462
|
+
|
|
3463
|
+
except Exception as e:
|
|
3464
|
+
log_error(f"Error creating component with config: {e}")
|
|
3465
|
+
raise
|
|
3466
|
+
|
|
3467
|
+
# --- Config ---
|
|
3468
|
+
def get_config(
|
|
3469
|
+
self,
|
|
3470
|
+
component_id: str,
|
|
3471
|
+
version: Optional[int] = None,
|
|
3472
|
+
label: Optional[str] = None,
|
|
3473
|
+
) -> Optional[Dict[str, Any]]:
|
|
3474
|
+
"""Get a config by component ID and version or label.
|
|
3475
|
+
|
|
3476
|
+
Args:
|
|
3477
|
+
component_id: The component ID.
|
|
3478
|
+
version: Specific version number. If None, uses current or latest draft.
|
|
3479
|
+
label: Config label to lookup. Ignored if version is provided.
|
|
3480
|
+
|
|
3481
|
+
Returns:
|
|
3482
|
+
Config dictionary or None if not found.
|
|
3483
|
+
"""
|
|
3484
|
+
try:
|
|
3485
|
+
configs_table = self._get_table(table_type="component_configs")
|
|
3486
|
+
components_table = self._get_table(table_type="components")
|
|
3487
|
+
|
|
3488
|
+
if configs_table is None or components_table is None:
|
|
3489
|
+
return None
|
|
3490
|
+
|
|
3491
|
+
with self.Session() as sess:
|
|
3492
|
+
# Always verify component exists and is not deleted
|
|
3493
|
+
component_row = (
|
|
3494
|
+
sess.execute(
|
|
3495
|
+
select(components_table.c.current_version, components_table.c.component_id).where(
|
|
3496
|
+
components_table.c.component_id == component_id,
|
|
3497
|
+
components_table.c.deleted_at.is_(None),
|
|
3498
|
+
)
|
|
3499
|
+
)
|
|
3500
|
+
.mappings()
|
|
3501
|
+
.one_or_none()
|
|
3502
|
+
)
|
|
3503
|
+
|
|
3504
|
+
if component_row is None:
|
|
3505
|
+
return None
|
|
3506
|
+
|
|
3507
|
+
current_version = component_row["current_version"]
|
|
3508
|
+
|
|
3509
|
+
if version is not None:
|
|
3510
|
+
stmt = select(configs_table).where(
|
|
3511
|
+
configs_table.c.component_id == component_id,
|
|
3512
|
+
configs_table.c.version == version,
|
|
3513
|
+
)
|
|
3514
|
+
elif label is not None:
|
|
3515
|
+
stmt = select(configs_table).where(
|
|
3516
|
+
configs_table.c.component_id == component_id,
|
|
3517
|
+
configs_table.c.label == label,
|
|
3518
|
+
)
|
|
3519
|
+
elif current_version is not None:
|
|
3520
|
+
# Use the current published version
|
|
3521
|
+
stmt = select(configs_table).where(
|
|
3522
|
+
configs_table.c.component_id == component_id,
|
|
3523
|
+
configs_table.c.version == current_version,
|
|
3524
|
+
)
|
|
3525
|
+
else:
|
|
3526
|
+
# No current_version set (draft only) - get the latest version
|
|
3527
|
+
stmt = (
|
|
3528
|
+
select(configs_table)
|
|
3529
|
+
.where(configs_table.c.component_id == component_id)
|
|
3530
|
+
.order_by(configs_table.c.version.desc())
|
|
3531
|
+
.limit(1)
|
|
3532
|
+
)
|
|
3533
|
+
|
|
3534
|
+
result = sess.execute(stmt).fetchone()
|
|
3535
|
+
return dict(result._mapping) if result else None
|
|
3536
|
+
|
|
3537
|
+
except Exception as e:
|
|
3538
|
+
log_error(f"Error getting config: {e}")
|
|
3539
|
+
raise
|
|
3540
|
+
|
|
3541
|
+
def upsert_config(
    self,
    component_id: str,
    config: Optional[Dict[str, Any]] = None,
    version: Optional[int] = None,
    label: Optional[str] = None,
    stage: Optional[str] = None,
    notes: Optional[str] = None,
    links: Optional[List[Dict[str, Any]]] = None,
) -> Dict[str, Any]:
    """Create or update a config version for a component.

    Rules:
    - Draft configs can be edited freely
    - Published configs are immutable
    - Publishing a config automatically sets it as current_version

    Args:
        component_id: The component ID.
        config: The config data. Required for create, optional for update.
        version: If None, creates new version. If provided, updates that version.
        label: Optional human-readable label.
        stage: "draft" or "published". Defaults to "draft" for new configs.
        notes: Optional notes.
        links: Optional list of links. Each link must have child_version set.

    Returns:
        Created/updated config dictionary.

    Raises:
        ValueError: If component doesn't exist, version not found, label conflict,
            or attempting to update a published config.
    """
    # Reject unknown stages before touching the database.
    if stage is not None and stage not in {"draft", "published"}:
        raise ValueError(f"Invalid stage: {stage}")

    try:
        # Config and link tables are created on demand; the components table
        # must already exist for an upsert to make sense.
        configs_table = self._get_table(table_type="component_configs", create_table_if_not_found=True)
        components_table = self._get_table(table_type="components")
        links_table = self._get_table(table_type="component_links", create_table_if_not_found=True)

        if components_table is None:
            raise ValueError("Components table not found")
        if configs_table is None:
            raise ValueError("Component configs table not found")

        # One transaction: config write, link rewrite, and current_version
        # pointer update all commit (or roll back) together.
        with self.Session() as sess, sess.begin():
            # Verify component exists and is not deleted
            component = sess.execute(
                select(components_table.c.component_id).where(
                    components_table.c.component_id == component_id,
                    components_table.c.deleted_at.is_(None),
                )
            ).fetchone()

            if component is None:
                raise ValueError(f"Component {component_id} not found")

            # Label uniqueness check
            if label is not None:
                label_query = select(configs_table.c.version).where(
                    configs_table.c.component_id == component_id,
                    configs_table.c.label == label,
                )
                if version is not None:
                    # Updating in place: the row being updated may keep its own label.
                    label_query = label_query.where(configs_table.c.version != version)

                if sess.execute(label_query).first():
                    raise ValueError(f"Label '{label}' already exists for {component_id}")

            # Validate links have child_version
            if links:
                for link in links:
                    if link.get("child_version") is None:
                        raise ValueError(f"child_version is required for link to {link['child_component_id']}")

            if version is None:
                # Create path: a brand-new version number is allocated.
                if config is None:
                    raise ValueError("config is required when creating a new version")

                # Default to draft for new configs
                if stage is None:
                    stage = "draft"

                # Highest existing version for this component (None when first).
                max_version = sess.execute(
                    select(configs_table.c.version)
                    .where(configs_table.c.component_id == component_id)
                    .order_by(configs_table.c.version.desc())
                    .limit(1)
                ).scalar()

                final_version = (max_version or 0) + 1

                sess.execute(
                    configs_table.insert().values(
                        component_id=component_id,
                        version=final_version,
                        label=label,
                        stage=stage,
                        config=config,
                        notes=notes,
                        created_at=int(time.time()),
                    )
                )
            else:
                # Update path: the target version must exist and be a draft.
                existing = sess.execute(
                    select(configs_table.c.version, configs_table.c.stage).where(
                        configs_table.c.component_id == component_id,
                        configs_table.c.version == version,
                    )
                ).fetchone()

                if existing is None:
                    raise ValueError(f"Config {component_id} v{version} not found")

                # Published configs are immutable
                if existing.stage == "published":
                    raise ValueError(f"Cannot update published config {component_id} v{version}")

                # Build update dict with only provided fields
                updates: Dict[str, Any] = {"updated_at": int(time.time())}
                if label is not None:
                    updates["label"] = label
                if stage is not None:
                    updates["stage"] = stage
                if config is not None:
                    updates["config"] = config
                if notes is not None:
                    updates["notes"] = notes

                sess.execute(
                    configs_table.update()
                    .where(
                        configs_table.c.component_id == component_id,
                        configs_table.c.version == version,
                    )
                    .values(**updates)
                )
                final_version = version

            # Replace-all semantics for links: drop the version's existing
            # links, then re-insert the provided set.
            if links is not None and links_table is not None:
                sess.execute(
                    links_table.delete().where(
                        links_table.c.parent_component_id == component_id,
                        links_table.c.parent_version == final_version,
                    )
                )
                for link in links:
                    sess.execute(
                        links_table.insert().values(
                            parent_component_id=component_id,
                            parent_version=final_version,
                            link_kind=link["link_kind"],
                            link_key=link["link_key"],
                            child_component_id=link["child_component_id"],
                            child_version=link["child_version"],
                            position=link["position"],
                            meta=link.get("meta"),
                            created_at=int(time.time()),
                        )
                    )

            # Determine final stage (could be from update or create)
            final_stage = stage if stage is not None else (existing.stage if version is not None else "draft")

            # Publishing promotes this version to the component's current_version.
            if final_stage == "published":
                sess.execute(
                    components_table.update()
                    .where(components_table.c.component_id == component_id)
                    .values(current_version=final_version, updated_at=int(time.time()))
                )

        # Read back after commit so the caller gets the persisted row.
        result = self.get_config(component_id, version=final_version)
        if result is None:
            raise ValueError(f"Failed to get config {component_id} v{final_version} after upsert")
        return result

    except Exception as e:
        log_error(f"Error upserting config: {e}")
        raise
|
|
3721
|
+
|
|
3722
|
+
def delete_config(
    self,
    component_id: str,
    version: int,
) -> bool:
    """Delete a single config version of a component.

    Draft configs are the only deletable kind: published configs are
    immutable, and the version the component currently points at cannot be
    removed either. Links owned by the deleted version are removed in the
    same transaction.

    Args:
        component_id: The component ID.
        version: The version to delete.

    Returns:
        True if the config was deleted, False if it (or a required table)
        could not be found.

    Raises:
        ValueError: When the target config is published or is the
            component's current version.
    """
    try:
        configs = self._get_table(table_type="component_configs")
        links = self._get_table(table_type="component_links")
        components = self._get_table(table_type="components")

        if configs is None or components is None:
            return False

        with self.Session() as sess, sess.begin():
            # Fetch the stage so immutability can be enforced.
            target = sess.execute(
                select(configs.c.stage).where(
                    configs.c.component_id == component_id,
                    configs.c.version == version,
                )
            ).fetchone()
            if target is None:
                return False

            if target.stage == "published":
                raise ValueError(f"Cannot delete published config {component_id} v{version}")

            # Refuse to delete the version the component currently points at.
            pointer = sess.execute(
                select(components.c.current_version).where(components.c.component_id == component_id)
            ).fetchone()
            if pointer is not None and pointer.current_version == version:
                raise ValueError(f"Cannot delete current config {component_id} v{version}")

            # Remove links owned by this version, then the config itself.
            if links is not None:
                sess.execute(
                    links.delete().where(
                        links.c.parent_component_id == component_id,
                        links.c.parent_version == version,
                    )
                )
            sess.execute(
                configs.delete().where(
                    configs.c.component_id == component_id,
                    configs.c.version == version,
                )
            )
            return True

    except Exception as e:
        log_error(f"Error deleting config: {e}")
        raise
|
|
3796
|
+
|
|
3797
|
+
def list_configs(
    self,
    component_id: str,
    include_config: bool = False,
) -> List[Dict[str, Any]]:
    """Return every config version stored for a component.

    Args:
        component_id: The component ID.
        include_config: When True the full config blob is included per row;
            otherwise only version metadata columns are returned.

    Returns:
        Config dictionaries ordered from newest to oldest version. An empty
        list is returned when the component is missing or soft-deleted.
    """
    try:
        configs = self._get_table(table_type="component_configs")
        components = self._get_table(table_type="components")
        if configs is None or components is None:
            return []

        with self.Session() as sess:
            # Bail out early for unknown or soft-deleted components.
            component = sess.execute(
                select(components.c.component_id).where(
                    components.c.component_id == component_id,
                    components.c.deleted_at.is_(None),
                )
            ).fetchone()
            if component is None:
                return []

            # Metadata-only projection unless the caller asked for the blob.
            projection = (
                select(configs)
                if include_config
                else select(
                    configs.c.component_id,
                    configs.c.version,
                    configs.c.label,
                    configs.c.stage,
                    configs.c.notes,
                    configs.c.created_at,
                    configs.c.updated_at,
                )
            )
            query = projection.where(configs.c.component_id == component_id).order_by(configs.c.version.desc())
            return [dict(row._mapping) for row in sess.execute(query).fetchall()]

    except Exception as e:
        log_error(f"Error listing configs: {e}")
        raise
|
|
3853
|
+
|
|
3854
|
+
def set_current_version(
    self,
    component_id: str,
    version: int,
) -> bool:
    """Point a component's current_version at a published config.

    Intended for rollback: switches the component to any previously
    published version. Draft versions can never become current.

    Args:
        component_id: The component ID.
        version: The published version to promote to current.

    Returns:
        True on success, False when the component or version is missing.

    Raises:
        ValueError: When the requested version is still a draft.
    """
    try:
        configs = self._get_table(table_type="component_configs")
        components = self._get_table(table_type="components")
        if configs is None or components is None:
            return False

        with self.Session() as sess, sess.begin():
            # The component must exist and not be soft-deleted.
            component = sess.execute(
                select(components.c.component_id).where(
                    components.c.component_id == component_id,
                    components.c.deleted_at.is_(None),
                )
            ).fetchone()
            if component is None:
                return False

            # The target version must exist; fetch its stage for validation.
            config_row = sess.execute(
                select(configs.c.stage).where(
                    configs.c.component_id == component_id,
                    configs.c.version == version,
                )
            ).fetchone()
            if config_row is None:
                return False

            # Only published configs can be set as current.
            if config_row.stage != "published":
                raise ValueError(
                    f"Cannot set draft config {component_id} v{version} as current. "
                    "Only published configs can be current."
                )

            # Move the pointer.
            sess.execute(
                components.update()
                .where(components.c.component_id == component_id)
                .values(current_version=version, updated_at=int(time.time()))
            )

        log_debug(f"Set {component_id} current version to {version}")
        return True

    except Exception as e:
        log_error(f"Error setting current version: {e}")
        raise
|
|
3925
|
+
|
|
3926
|
+
# --- Component Links ---
|
|
3927
|
+
def get_links(
    self,
    component_id: str,
    version: int,
    link_kind: Optional[str] = None,
) -> List[Dict[str, Any]]:
    """Fetch the child links of one config version, ordered by position.

    Args:
        component_id: The parent component ID.
        version: The parent config version.
        link_kind: Optional link-kind filter (member|step).

    Returns:
        Link dictionaries sorted by their position column; empty when the
        links table is unavailable.
    """
    try:
        links = self._get_table(table_type="component_links")
        if links is None:
            return []

        query = (
            select(links)
            .where(
                links.c.parent_component_id == component_id,
                links.c.parent_version == version,
            )
            .order_by(links.c.position)
        )
        if link_kind is not None:
            query = query.where(links.c.link_kind == link_kind)

        with self.Session() as sess:
            return [dict(row._mapping) for row in sess.execute(query).fetchall()]

    except Exception as e:
        log_error(f"Error getting links: {e}")
        raise
|
|
3966
|
+
|
|
3967
|
+
def get_dependents(
    self,
    component_id: str,
    version: Optional[int] = None,
) -> List[Dict[str, Any]]:
    """List the links that point at this component (reverse lookup).

    Args:
        component_id: The component whose dependents are wanted.
        version: Restrict to links targeting this exact version; None
            matches links to any version.

    Returns:
        Link dictionaries whose child is this component.
    """
    try:
        links = self._get_table(table_type="component_links")
        if links is None:
            return []

        query = select(links).where(links.c.child_component_id == component_id)
        if version is not None:
            query = query.where(links.c.child_version == version)

        with self.Session() as sess:
            rows = sess.execute(query).fetchall()
        return [dict(row._mapping) for row in rows]

    except Exception as e:
        log_error(f"Error getting dependents: {e}")
        raise
|
|
3997
|
+
|
|
3998
|
+
def resolve_version(
    self,
    component_id: str,
    version: Optional[int],
) -> Optional[int]:
    """Turn an optional version into a concrete one.

    An explicit version is returned untouched; None is resolved to the
    component's current_version pointer.

    Args:
        component_id: The component ID.
        version: Explicit version, or None meaning "current".

    Returns:
        The concrete version number, or None when the component is missing
        (or has no current version set).
    """
    # Explicit versions need no lookup.
    if version is not None:
        return version

    try:
        components = self._get_table(table_type="components")
        if components is None:
            return None

        with self.Session() as sess:
            return sess.execute(
                select(components.c.current_version).where(components.c.component_id == component_id)
            ).scalar()

    except Exception as e:
        log_error(f"Error resolving version: {e}")
        raise
|
|
4029
|
+
|
|
4030
|
+
def load_component_graph(
    self,
    component_id: str,
    version: Optional[int] = None,
    _visiting: Optional[set] = None,
) -> Optional[Dict[str, Any]]:
    """Load a component with its full resolved graph.

    Recursively resolves every child link into a nested structure of
    components, configs, and links.

    Args:
        component_id: The component ID.
        version: Specific version or None for current.
        _visiting: Internal recursion guard — the set of (component_id,
            version) nodes on the current resolution path. Callers should
            not pass this.

    Returns:
        Dictionary with component, config, resolved children, and a flat
        map of all resolved versions. None when the component, version, or
        config cannot be resolved, or when a link cycle is detected.
    """
    try:
        # Get component
        component = self.get_component(component_id)
        if component is None:
            return None

        # Resolve version
        resolved_version = self.resolve_version(component_id, version)
        if resolved_version is None:
            return None

        # Cycle guard: nothing in the links table prevents a component from
        # (transitively) linking back to itself, which previously recursed
        # until RecursionError. Track only the nodes on the current path so
        # diamond-shaped (non-cyclic) graphs are still fully expanded.
        path = _visiting if _visiting is not None else set()
        node = (component_id, resolved_version)
        if node in path:
            log_error(f"Cycle detected in component graph at {component_id} v{resolved_version}")
            return None
        path.add(node)

        try:
            # Get config
            config = self.get_config(component_id, version=resolved_version)
            if config is None:
                return None

            # Get links
            links = self.get_links(component_id, resolved_version)

            # Resolve children recursively
            children = []
            resolved_versions: Dict[str, Optional[int]] = {component_id: resolved_version}

            for link in links:
                child_version = self.resolve_version(
                    link["child_component_id"],
                    link["child_version"],
                )
                resolved_versions[link["child_component_id"]] = child_version

                child_graph = self.load_component_graph(
                    link["child_component_id"],
                    version=child_version,
                    _visiting=path,
                )

                if child_graph:
                    # Merge nested resolved versions
                    resolved_versions.update(child_graph.get("resolved_versions", {}))

                    children.append(
                        {
                            "link": link,
                            "graph": child_graph,
                        }
                    )

            return {
                "component": component,
                "config": config,
                "children": children,
                "resolved_versions": resolved_versions,
            }
        finally:
            # Leaving this node: remove it from the active path so other
            # branches of the graph may still reference it.
            path.discard(node)

    except Exception as e:
        log_error(f"Error loading component graph: {e}")
        raise
|
|
4100
|
+
|
|
4101
|
+
# -- Learning methods --
|
|
4102
|
+
def get_learning(
    self,
    learning_type: str,
    user_id: Optional[str] = None,
    agent_id: Optional[str] = None,
    team_id: Optional[str] = None,
    workflow_id: Optional[str] = None,
    session_id: Optional[str] = None,
    namespace: Optional[str] = None,
    entity_id: Optional[str] = None,
    entity_type: Optional[str] = None,
) -> Optional[Dict[str, Any]]:
    """Fetch a single learning record matching the given filters.

    Args:
        learning_type: Type of learning ('user_profile', 'session_context', etc.)
        user_id: Filter by user ID.
        agent_id: Filter by agent ID.
        team_id: Filter by team ID.
        workflow_id: Filter by workflow ID.
        session_id: Filter by session ID.
        namespace: Filter by namespace ('user', 'global', or custom).
        entity_id: Filter by entity ID (for entity-specific learnings).
        entity_type: Filter by entity type ('person', 'company', etc.).

    Returns:
        Dict with a 'content' key holding the learning data, or None when
        no record matches (errors are logged at debug level, not raised).
    """
    try:
        table = self._get_table(table_type="learnings")
        if table is None:
            return None

        query = select(table).where(table.c.learning_type == learning_type)

        # Apply each optional scope filter only when a value was supplied.
        scope_filters = (
            (table.c.user_id, user_id),
            (table.c.agent_id, agent_id),
            (table.c.team_id, team_id),
            (table.c.workflow_id, workflow_id),
            (table.c.session_id, session_id),
            (table.c.namespace, namespace),
            (table.c.entity_id, entity_id),
            (table.c.entity_type, entity_type),
        )
        for column, value in scope_filters:
            if value is not None:
                query = query.where(column == value)

        with self.Session() as sess:
            record = sess.execute(query).fetchone()

        if record is None:
            return None
        return {"content": dict(record._mapping).get("content")}

    except Exception as e:
        log_debug(f"Error retrieving learning: {e}")
        return None
|
|
4165
|
+
|
|
4166
|
+
def upsert_learning(
    self,
    id: str,
    learning_type: str,
    content: Dict[str, Any],
    user_id: Optional[str] = None,
    agent_id: Optional[str] = None,
    team_id: Optional[str] = None,
    workflow_id: Optional[str] = None,
    session_id: Optional[str] = None,
    namespace: Optional[str] = None,
    entity_id: Optional[str] = None,
    entity_type: Optional[str] = None,
    metadata: Optional[Dict[str, Any]] = None,
) -> None:
    """Insert a learning record, or refresh it if the ID already exists.

    On conflict with an existing learning_id only content, metadata, and
    updated_at are overwritten; the scope columns keep their stored values.
    Errors are logged at debug level and swallowed.

    Args:
        id: Unique identifier for the learning.
        learning_type: Type of learning ('user_profile', 'session_context', etc.)
        content: The learning content as a dict.
        user_id: Associated user ID.
        agent_id: Associated agent ID.
        team_id: Associated team ID.
        workflow_id: Associated workflow ID.
        session_id: Associated session ID.
        namespace: Namespace for scoping ('user', 'global', or custom).
        entity_id: Associated entity ID (for entity-specific learnings).
        entity_type: Entity type ('person', 'company', etc.).
        metadata: Optional metadata.
    """
    try:
        table = self._get_table(table_type="learnings", create_table_if_not_found=True)
        if table is None:
            return

        now = int(time.time())

        insert_stmt = sqlite.insert(table).values(
            learning_id=id,
            learning_type=learning_type,
            namespace=namespace,
            user_id=user_id,
            agent_id=agent_id,
            team_id=team_id,
            workflow_id=workflow_id,
            session_id=session_id,
            entity_id=entity_id,
            entity_type=entity_type,
            content=content,
            metadata=metadata,
            created_at=now,
            updated_at=now,
        )
        # ON CONFLICT: keep the original row, refresh only the mutable fields.
        upsert_stmt = insert_stmt.on_conflict_do_update(
            index_elements=["learning_id"],
            set_={"content": content, "metadata": metadata, "updated_at": now},
        )

        with self.Session() as sess, sess.begin():
            sess.execute(upsert_stmt)

        log_debug(f"Upserted learning: {id}")

    except Exception as e:
        log_debug(f"Error upserting learning: {e}")
|
|
4235
|
+
|
|
4236
|
+
def delete_learning(self, id: str) -> bool:
    """Remove a learning record by its ID.

    Args:
        id: The learning ID to delete.

    Returns:
        True when a row was removed; False when nothing matched, the table
        is unavailable, or an error occurred (errors are logged at debug
        level, not raised).
    """
    try:
        table = self._get_table(table_type="learnings")
        if table is None:
            return False

        with self.Session() as sess, sess.begin():
            outcome = sess.execute(table.delete().where(table.c.learning_id == id))
            return outcome.rowcount > 0

    except Exception as e:
        log_debug(f"Error deleting learning: {e}")
        return False
|
|
4258
|
+
|
|
4259
|
+
def get_learnings(
    self,
    learning_type: Optional[str] = None,
    user_id: Optional[str] = None,
    agent_id: Optional[str] = None,
    team_id: Optional[str] = None,
    workflow_id: Optional[str] = None,
    session_id: Optional[str] = None,
    namespace: Optional[str] = None,
    entity_id: Optional[str] = None,
    entity_type: Optional[str] = None,
    limit: Optional[int] = None,
) -> List[Dict[str, Any]]:
    """Fetch learning records matching any combination of filters.

    Args:
        learning_type: Filter by learning type.
        user_id: Filter by user ID.
        agent_id: Filter by agent ID.
        team_id: Filter by team ID.
        workflow_id: Filter by workflow ID.
        session_id: Filter by session ID.
        namespace: Filter by namespace ('user', 'global', or custom).
        entity_id: Filter by entity ID (for entity-specific learnings).
        entity_type: Filter by entity type ('person', 'company', etc.).
        limit: Maximum number of records to return.

    Returns:
        Matching learning records, most recently updated first. Empty on
        error (errors are logged at debug level, not raised).
    """
    try:
        table = self._get_table(table_type="learnings")
        if table is None:
            return []

        query = select(table)

        # Each filter is applied only when its value was provided.
        filters = (
            (table.c.learning_type, learning_type),
            (table.c.user_id, user_id),
            (table.c.agent_id, agent_id),
            (table.c.team_id, team_id),
            (table.c.workflow_id, workflow_id),
            (table.c.session_id, session_id),
            (table.c.namespace, namespace),
            (table.c.entity_id, entity_id),
            (table.c.entity_type, entity_type),
        )
        for column, value in filters:
            if value is not None:
                query = query.where(column == value)

        # Newest first; cap the result set when asked.
        query = query.order_by(table.c.updated_at.desc())
        if limit is not None:
            query = query.limit(limit)

        with self.Session() as sess:
            return [dict(row._mapping) for row in sess.execute(query).fetchall()]

    except Exception as e:
        log_debug(f"Error getting learnings: {e}")
        return []
|