agno 2.1.2__py3-none-any.whl → 2.3.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/agent.py +5540 -2273
- agno/api/api.py +2 -0
- agno/api/os.py +1 -1
- agno/compression/__init__.py +3 -0
- agno/compression/manager.py +247 -0
- agno/culture/__init__.py +3 -0
- agno/culture/manager.py +956 -0
- agno/db/async_postgres/__init__.py +3 -0
- agno/db/base.py +689 -6
- agno/db/dynamo/dynamo.py +933 -37
- agno/db/dynamo/schemas.py +174 -10
- agno/db/dynamo/utils.py +63 -4
- agno/db/firestore/firestore.py +831 -9
- agno/db/firestore/schemas.py +51 -0
- agno/db/firestore/utils.py +102 -4
- agno/db/gcs_json/gcs_json_db.py +660 -12
- agno/db/gcs_json/utils.py +60 -26
- agno/db/in_memory/in_memory_db.py +287 -14
- agno/db/in_memory/utils.py +60 -2
- agno/db/json/json_db.py +590 -14
- agno/db/json/utils.py +60 -26
- agno/db/migrations/manager.py +199 -0
- agno/db/migrations/v1_to_v2.py +43 -13
- agno/db/migrations/versions/__init__.py +0 -0
- agno/db/migrations/versions/v2_3_0.py +938 -0
- agno/db/mongo/__init__.py +15 -1
- agno/db/mongo/async_mongo.py +2760 -0
- agno/db/mongo/mongo.py +879 -11
- agno/db/mongo/schemas.py +42 -0
- agno/db/mongo/utils.py +80 -8
- agno/db/mysql/__init__.py +2 -1
- agno/db/mysql/async_mysql.py +2912 -0
- agno/db/mysql/mysql.py +946 -68
- agno/db/mysql/schemas.py +72 -10
- agno/db/mysql/utils.py +198 -7
- agno/db/postgres/__init__.py +2 -1
- agno/db/postgres/async_postgres.py +2579 -0
- agno/db/postgres/postgres.py +942 -57
- agno/db/postgres/schemas.py +81 -18
- agno/db/postgres/utils.py +164 -2
- agno/db/redis/redis.py +671 -7
- agno/db/redis/schemas.py +50 -0
- agno/db/redis/utils.py +65 -7
- agno/db/schemas/__init__.py +2 -1
- agno/db/schemas/culture.py +120 -0
- agno/db/schemas/evals.py +1 -0
- agno/db/schemas/memory.py +17 -2
- agno/db/singlestore/schemas.py +63 -0
- agno/db/singlestore/singlestore.py +949 -83
- agno/db/singlestore/utils.py +60 -2
- agno/db/sqlite/__init__.py +2 -1
- agno/db/sqlite/async_sqlite.py +2911 -0
- agno/db/sqlite/schemas.py +62 -0
- agno/db/sqlite/sqlite.py +965 -46
- agno/db/sqlite/utils.py +169 -8
- agno/db/surrealdb/__init__.py +3 -0
- agno/db/surrealdb/metrics.py +292 -0
- agno/db/surrealdb/models.py +334 -0
- agno/db/surrealdb/queries.py +71 -0
- agno/db/surrealdb/surrealdb.py +1908 -0
- agno/db/surrealdb/utils.py +147 -0
- agno/db/utils.py +2 -0
- agno/eval/__init__.py +10 -0
- agno/eval/accuracy.py +75 -55
- agno/eval/agent_as_judge.py +861 -0
- agno/eval/base.py +29 -0
- agno/eval/performance.py +16 -7
- agno/eval/reliability.py +28 -16
- agno/eval/utils.py +35 -17
- agno/exceptions.py +27 -2
- agno/filters.py +354 -0
- agno/guardrails/prompt_injection.py +1 -0
- agno/hooks/__init__.py +3 -0
- agno/hooks/decorator.py +164 -0
- agno/integrations/discord/client.py +1 -1
- agno/knowledge/chunking/agentic.py +13 -10
- agno/knowledge/chunking/fixed.py +4 -1
- agno/knowledge/chunking/semantic.py +9 -4
- agno/knowledge/chunking/strategy.py +59 -15
- agno/knowledge/embedder/fastembed.py +1 -1
- agno/knowledge/embedder/nebius.py +1 -1
- agno/knowledge/embedder/ollama.py +8 -0
- agno/knowledge/embedder/openai.py +8 -8
- agno/knowledge/embedder/sentence_transformer.py +6 -2
- agno/knowledge/embedder/vllm.py +262 -0
- agno/knowledge/knowledge.py +1618 -318
- agno/knowledge/reader/base.py +6 -2
- agno/knowledge/reader/csv_reader.py +8 -10
- agno/knowledge/reader/docx_reader.py +5 -6
- agno/knowledge/reader/field_labeled_csv_reader.py +16 -20
- agno/knowledge/reader/json_reader.py +5 -4
- agno/knowledge/reader/markdown_reader.py +8 -8
- agno/knowledge/reader/pdf_reader.py +17 -19
- agno/knowledge/reader/pptx_reader.py +101 -0
- agno/knowledge/reader/reader_factory.py +32 -3
- agno/knowledge/reader/s3_reader.py +3 -3
- agno/knowledge/reader/tavily_reader.py +193 -0
- agno/knowledge/reader/text_reader.py +22 -10
- agno/knowledge/reader/web_search_reader.py +1 -48
- agno/knowledge/reader/website_reader.py +10 -10
- agno/knowledge/reader/wikipedia_reader.py +33 -1
- agno/knowledge/types.py +1 -0
- agno/knowledge/utils.py +72 -7
- agno/media.py +22 -6
- agno/memory/__init__.py +14 -1
- agno/memory/manager.py +544 -83
- agno/memory/strategies/__init__.py +15 -0
- agno/memory/strategies/base.py +66 -0
- agno/memory/strategies/summarize.py +196 -0
- agno/memory/strategies/types.py +37 -0
- agno/models/aimlapi/aimlapi.py +17 -0
- agno/models/anthropic/claude.py +515 -40
- agno/models/aws/bedrock.py +102 -21
- agno/models/aws/claude.py +131 -274
- agno/models/azure/ai_foundry.py +41 -19
- agno/models/azure/openai_chat.py +39 -8
- agno/models/base.py +1249 -525
- agno/models/cerebras/cerebras.py +91 -21
- agno/models/cerebras/cerebras_openai.py +21 -2
- agno/models/cohere/chat.py +40 -6
- agno/models/cometapi/cometapi.py +18 -1
- agno/models/dashscope/dashscope.py +2 -3
- agno/models/deepinfra/deepinfra.py +18 -1
- agno/models/deepseek/deepseek.py +69 -3
- agno/models/fireworks/fireworks.py +18 -1
- agno/models/google/gemini.py +877 -80
- agno/models/google/utils.py +22 -0
- agno/models/groq/groq.py +51 -18
- agno/models/huggingface/huggingface.py +17 -6
- agno/models/ibm/watsonx.py +16 -6
- agno/models/internlm/internlm.py +18 -1
- agno/models/langdb/langdb.py +13 -1
- agno/models/litellm/chat.py +44 -9
- agno/models/litellm/litellm_openai.py +18 -1
- agno/models/message.py +28 -5
- agno/models/meta/llama.py +47 -14
- agno/models/meta/llama_openai.py +22 -17
- agno/models/mistral/mistral.py +8 -4
- agno/models/nebius/nebius.py +6 -7
- agno/models/nvidia/nvidia.py +20 -3
- agno/models/ollama/chat.py +24 -8
- agno/models/openai/chat.py +104 -29
- agno/models/openai/responses.py +101 -81
- agno/models/openrouter/openrouter.py +60 -3
- agno/models/perplexity/perplexity.py +17 -1
- agno/models/portkey/portkey.py +7 -6
- agno/models/requesty/requesty.py +24 -4
- agno/models/response.py +73 -2
- agno/models/sambanova/sambanova.py +20 -3
- agno/models/siliconflow/siliconflow.py +19 -2
- agno/models/together/together.py +20 -3
- agno/models/utils.py +254 -8
- agno/models/vercel/v0.py +20 -3
- agno/models/vertexai/__init__.py +0 -0
- agno/models/vertexai/claude.py +190 -0
- agno/models/vllm/vllm.py +19 -14
- agno/models/xai/xai.py +19 -2
- agno/os/app.py +549 -152
- agno/os/auth.py +190 -3
- agno/os/config.py +23 -0
- agno/os/interfaces/a2a/router.py +8 -11
- agno/os/interfaces/a2a/utils.py +1 -1
- agno/os/interfaces/agui/router.py +18 -3
- agno/os/interfaces/agui/utils.py +152 -39
- agno/os/interfaces/slack/router.py +55 -37
- agno/os/interfaces/slack/slack.py +9 -1
- agno/os/interfaces/whatsapp/router.py +0 -1
- agno/os/interfaces/whatsapp/security.py +3 -1
- agno/os/mcp.py +110 -52
- agno/os/middleware/__init__.py +2 -0
- agno/os/middleware/jwt.py +676 -112
- agno/os/router.py +40 -1478
- agno/os/routers/agents/__init__.py +3 -0
- agno/os/routers/agents/router.py +599 -0
- agno/os/routers/agents/schema.py +261 -0
- agno/os/routers/evals/evals.py +96 -39
- agno/os/routers/evals/schemas.py +65 -33
- agno/os/routers/evals/utils.py +80 -10
- agno/os/routers/health.py +10 -4
- agno/os/routers/knowledge/knowledge.py +196 -38
- agno/os/routers/knowledge/schemas.py +82 -22
- agno/os/routers/memory/memory.py +279 -52
- agno/os/routers/memory/schemas.py +46 -17
- agno/os/routers/metrics/metrics.py +20 -8
- agno/os/routers/metrics/schemas.py +16 -16
- agno/os/routers/session/session.py +462 -34
- agno/os/routers/teams/__init__.py +3 -0
- agno/os/routers/teams/router.py +512 -0
- agno/os/routers/teams/schema.py +257 -0
- agno/os/routers/traces/__init__.py +3 -0
- agno/os/routers/traces/schemas.py +414 -0
- agno/os/routers/traces/traces.py +499 -0
- agno/os/routers/workflows/__init__.py +3 -0
- agno/os/routers/workflows/router.py +624 -0
- agno/os/routers/workflows/schema.py +75 -0
- agno/os/schema.py +256 -693
- agno/os/scopes.py +469 -0
- agno/os/utils.py +514 -36
- agno/reasoning/anthropic.py +80 -0
- agno/reasoning/gemini.py +73 -0
- agno/reasoning/openai.py +5 -0
- agno/reasoning/vertexai.py +76 -0
- agno/run/__init__.py +6 -0
- agno/run/agent.py +155 -32
- agno/run/base.py +55 -3
- agno/run/requirement.py +181 -0
- agno/run/team.py +125 -38
- agno/run/workflow.py +72 -18
- agno/session/agent.py +102 -89
- agno/session/summary.py +56 -15
- agno/session/team.py +164 -90
- agno/session/workflow.py +405 -40
- agno/table.py +10 -0
- agno/team/team.py +3974 -1903
- agno/tools/dalle.py +2 -4
- agno/tools/eleven_labs.py +23 -25
- agno/tools/exa.py +21 -16
- agno/tools/file.py +153 -23
- agno/tools/file_generation.py +16 -10
- agno/tools/firecrawl.py +15 -7
- agno/tools/function.py +193 -38
- agno/tools/gmail.py +238 -14
- agno/tools/google_drive.py +271 -0
- agno/tools/googlecalendar.py +36 -8
- agno/tools/googlesheets.py +20 -5
- agno/tools/jira.py +20 -0
- agno/tools/mcp/__init__.py +10 -0
- agno/tools/mcp/mcp.py +331 -0
- agno/tools/mcp/multi_mcp.py +347 -0
- agno/tools/mcp/params.py +24 -0
- agno/tools/mcp_toolbox.py +3 -3
- agno/tools/models/nebius.py +5 -5
- agno/tools/models_labs.py +20 -10
- agno/tools/nano_banana.py +151 -0
- agno/tools/notion.py +204 -0
- agno/tools/parallel.py +314 -0
- agno/tools/postgres.py +76 -36
- agno/tools/redshift.py +406 -0
- agno/tools/scrapegraph.py +1 -1
- agno/tools/shopify.py +1519 -0
- agno/tools/slack.py +18 -3
- agno/tools/spotify.py +919 -0
- agno/tools/tavily.py +146 -0
- agno/tools/toolkit.py +25 -0
- agno/tools/workflow.py +8 -1
- agno/tools/yfinance.py +12 -11
- agno/tracing/__init__.py +12 -0
- agno/tracing/exporter.py +157 -0
- agno/tracing/schemas.py +276 -0
- agno/tracing/setup.py +111 -0
- agno/utils/agent.py +938 -0
- agno/utils/cryptography.py +22 -0
- agno/utils/dttm.py +33 -0
- agno/utils/events.py +151 -3
- agno/utils/gemini.py +15 -5
- agno/utils/hooks.py +118 -4
- agno/utils/http.py +113 -2
- agno/utils/knowledge.py +12 -5
- agno/utils/log.py +1 -0
- agno/utils/mcp.py +92 -2
- agno/utils/media.py +187 -1
- agno/utils/merge_dict.py +3 -3
- agno/utils/message.py +60 -0
- agno/utils/models/ai_foundry.py +9 -2
- agno/utils/models/claude.py +49 -14
- agno/utils/models/cohere.py +9 -2
- agno/utils/models/llama.py +9 -2
- agno/utils/models/mistral.py +4 -2
- agno/utils/print_response/agent.py +109 -16
- agno/utils/print_response/team.py +223 -30
- agno/utils/print_response/workflow.py +251 -34
- agno/utils/streamlit.py +1 -1
- agno/utils/team.py +98 -9
- agno/utils/tokens.py +657 -0
- agno/vectordb/base.py +39 -7
- agno/vectordb/cassandra/cassandra.py +21 -5
- agno/vectordb/chroma/chromadb.py +43 -12
- agno/vectordb/clickhouse/clickhousedb.py +21 -5
- agno/vectordb/couchbase/couchbase.py +29 -5
- agno/vectordb/lancedb/lance_db.py +92 -181
- agno/vectordb/langchaindb/langchaindb.py +24 -4
- agno/vectordb/lightrag/lightrag.py +17 -3
- agno/vectordb/llamaindex/llamaindexdb.py +25 -5
- agno/vectordb/milvus/milvus.py +50 -37
- agno/vectordb/mongodb/__init__.py +7 -1
- agno/vectordb/mongodb/mongodb.py +36 -30
- agno/vectordb/pgvector/pgvector.py +201 -77
- agno/vectordb/pineconedb/pineconedb.py +41 -23
- agno/vectordb/qdrant/qdrant.py +67 -54
- agno/vectordb/redis/__init__.py +9 -0
- agno/vectordb/redis/redisdb.py +682 -0
- agno/vectordb/singlestore/singlestore.py +50 -29
- agno/vectordb/surrealdb/surrealdb.py +31 -41
- agno/vectordb/upstashdb/upstashdb.py +34 -6
- agno/vectordb/weaviate/weaviate.py +53 -14
- agno/workflow/__init__.py +2 -0
- agno/workflow/agent.py +299 -0
- agno/workflow/condition.py +120 -18
- agno/workflow/loop.py +77 -10
- agno/workflow/parallel.py +231 -143
- agno/workflow/router.py +118 -17
- agno/workflow/step.py +609 -170
- agno/workflow/steps.py +73 -6
- agno/workflow/types.py +96 -21
- agno/workflow/workflow.py +2039 -262
- {agno-2.1.2.dist-info → agno-2.3.13.dist-info}/METADATA +201 -66
- agno-2.3.13.dist-info/RECORD +613 -0
- agno/tools/googlesearch.py +0 -98
- agno/tools/mcp.py +0 -679
- agno/tools/memori.py +0 -339
- agno-2.1.2.dist-info/RECORD +0 -543
- {agno-2.1.2.dist-info → agno-2.3.13.dist-info}/WHEEL +0 -0
- {agno-2.1.2.dist-info → agno-2.3.13.dist-info}/licenses/LICENSE +0 -0
- {agno-2.1.2.dist-info → agno-2.3.13.dist-info}/top_level.txt +0 -0
agno/db/singlestore/singlestore.py

```diff
@@ -1,10 +1,15 @@
 import json
 import time
 from datetime import date, datetime, timedelta, timezone
-from typing import Any, Dict, List, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
 from uuid import uuid4
 
+if TYPE_CHECKING:
+    from agno.tracing.schemas import Span, Trace
+
 from agno.db.base import BaseDb, SessionType
+from agno.db.migrations.manager import MigrationManager
+from agno.db.schemas.culture import CulturalKnowledge
 from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
 from agno.db.schemas.knowledge import KnowledgeRow
 from agno.db.schemas.memory import UserMemory
@@ -14,22 +19,24 @@ from agno.db.singlestore.utils import (
     bulk_upsert_metrics,
     calculate_date_metrics,
     create_schema,
+    deserialize_cultural_knowledge_from_db,
     fetch_all_sessions_data,
     get_dates_to_calculate_metrics_for,
     is_table_available,
     is_valid_table,
+    serialize_cultural_knowledge_for_db,
 )
 from agno.session import AgentSession, Session, TeamSession, WorkflowSession
 from agno.utils.log import log_debug, log_error, log_info, log_warning
 from agno.utils.string import generate_id
 
 try:
-    from sqlalchemy import Index, UniqueConstraint, and_, func, update
+    from sqlalchemy import Index, UniqueConstraint, and_, func, select, update
     from sqlalchemy.dialects import mysql
     from sqlalchemy.engine import Engine, create_engine
     from sqlalchemy.orm import scoped_session, sessionmaker
     from sqlalchemy.schema import Column, MetaData, Table
-    from sqlalchemy.sql.expression import
+    from sqlalchemy.sql.expression import text
 except ImportError:
     raise ImportError("`sqlalchemy` not installed. Please install it using `pip install sqlalchemy`")
@@ -42,10 +49,15 @@ class SingleStoreDb(BaseDb):
         db_schema: Optional[str] = None,
         db_url: Optional[str] = None,
         session_table: Optional[str] = None,
+        culture_table: Optional[str] = None,
         memory_table: Optional[str] = None,
         metrics_table: Optional[str] = None,
         eval_table: Optional[str] = None,
         knowledge_table: Optional[str] = None,
+        versions_table: Optional[str] = None,
+        traces_table: Optional[str] = None,
+        spans_table: Optional[str] = None,
+        create_schema: bool = True,
     ):
         """
         Interface for interacting with a SingleStore database.
```
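The constructor now takes dedicated tables for cultural knowledge, schema versions, traces, and spans, plus a `create_schema` toggle. A minimal usage sketch; the import path is assumed from the package layout, and the URL and table names below are placeholders, not values from this diff:

```python
from agno.db.singlestore import SingleStoreDb  # import path assumed from the package layout

db = SingleStoreDb(
    db_url="mysql+pymysql://user:pass@host:3306/agno",  # placeholder connection URL
    session_table="agno_sessions",
    culture_table="agno_culture",            # new: cultural knowledge storage
    versions_table="agno_schema_versions",   # new: per-table schema version tracking
    traces_table="agno_traces",              # new: tracing support
    spans_table="agno_spans",
    create_schema=True,  # set False when migrations manage the schema externally
)
```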
```diff
@@ -61,11 +73,14 @@ class SingleStoreDb(BaseDb):
             db_schema (Optional[str]): The database schema to use.
             db_url (Optional[str]): The database URL to connect to.
             session_table (Optional[str]): Name of the table to store Agent, Team and Workflow sessions.
+            culture_table (Optional[str]): Name of the table to store cultural knowledge.
             memory_table (Optional[str]): Name of the table to store memories.
             metrics_table (Optional[str]): Name of the table to store metrics.
             eval_table (Optional[str]): Name of the table to store evaluation runs data.
             knowledge_table (Optional[str]): Name of the table to store knowledge content.
-
+            versions_table (Optional[str]): Name of the table to store schema versions.
+            create_schema (bool): Whether to automatically create the database schema if it doesn't exist.
+                Set to False if schema is managed externally (e.g., via migrations). Defaults to True.
         Raises:
             ValueError: If neither db_url nor db_engine is provided.
             ValueError: If none of the tables are provided.
@@ -79,10 +94,14 @@ class SingleStoreDb(BaseDb):
         super().__init__(
             id=id,
             session_table=session_table,
+            culture_table=culture_table,
             memory_table=memory_table,
             metrics_table=metrics_table,
             eval_table=eval_table,
             knowledge_table=knowledge_table,
+            versions_table=versions_table,
+            traces_table=traces_table,
+            spans_table=spans_table,
         )
 
         _engine: Optional[Engine] = db_engine
@@ -100,14 +119,26 @@ class SingleStoreDb(BaseDb):
         self.db_url: Optional[str] = db_url
         self.db_engine: Engine = _engine
         self.db_schema: Optional[str] = db_schema
-        self.metadata: MetaData = MetaData()
+        self.metadata: MetaData = MetaData(schema=self.db_schema)
+        self.create_schema: bool = create_schema
 
         # Initialize database session
         self.Session: scoped_session = scoped_session(sessionmaker(bind=self.db_engine))
 
     # -- DB methods --
+    def table_exists(self, table_name: str) -> bool:
+        """Check if a table with the given name exists in the SingleStore database.
+
+        Args:
+            table_name: Name of the table to check
+
+        Returns:
+            bool: True if the table exists in the database, False otherwise
+        """
+        with self.Session() as sess:
+            return is_table_available(session=sess, table_name=table_name, db_schema=self.db_schema)
 
-    def _create_table_structure_only(self, table_name: str, table_type: str
+    def _create_table_structure_only(self, table_name: str, table_type: str) -> Table:
         """
         Create a table structure definition without actually creating the table in the database.
         Used to avoid autoload issues with SingleStore JSON types.
```
```diff
@@ -115,7 +146,6 @@ class SingleStoreDb(BaseDb):
         Args:
             table_name (str): Name of the table
             table_type (str): Type of table (used to get schema definition)
-            db_schema (Optional[str]): Database schema name
 
         Returns:
             Table: SQLAlchemy Table object with column definitions
@@ -141,34 +171,44 @@ class SingleStoreDb(BaseDb):
                 columns.append(Column(*column_args, **column_kwargs))
 
             # Create the table object without constraints to avoid autoload issues
-
-            table = Table(table_name, table_metadata, *columns, schema=db_schema)
+            table = Table(table_name, self.metadata, *columns, schema=self.db_schema)
 
             return table
 
         except Exception as e:
-            table_ref = f"{db_schema}.{table_name}" if db_schema else table_name
+            table_ref = f"{self.db_schema}.{table_name}" if self.db_schema else table_name
             log_error(f"Could not create table structure for {table_ref}: {e}")
             raise
 
-    def
+    def _create_all_tables(self):
+        """Create all tables for the database."""
+        tables_to_create = [
+            (self.session_table_name, "sessions"),
+            (self.memory_table_name, "memories"),
+            (self.metrics_table_name, "metrics"),
+            (self.eval_table_name, "evals"),
+            (self.knowledge_table_name, "knowledge"),
+            (self.versions_table_name, "versions"),
+        ]
+
+        for table_name, table_type in tables_to_create:
+            self._get_or_create_table(table_name=table_name, table_type=table_type, create_table_if_not_found=True)
+
+    def _create_table(self, table_name: str, table_type: str) -> Table:
         """
         Create a table with the appropriate schema based on the table type.
 
         Args:
             table_name (str): Name of the table to create
             table_type (str): Type of table (used to get schema definition)
-            db_schema (Optional[str]): Database schema name
 
         Returns:
             Table: SQLAlchemy Table object
         """
-        table_ref = f"{db_schema}.{table_name}" if db_schema else table_name
+        table_ref = f"{self.db_schema}.{table_name}" if self.db_schema else table_name
         try:
             table_schema = get_table_schema_definition(table_type)
 
-            log_debug(f"Creating table {table_ref} with schema: {table_schema}")
-
             columns: List[Column] = []
             indexes: List[str] = []
             unique_constraints: List[str] = []
@@ -190,8 +230,7 @@ class SingleStoreDb(BaseDb):
                 columns.append(Column(*column_args, **column_kwargs))
 
             # Create the table object
-
-            table = Table(table_name, table_metadata, *columns, schema=db_schema)
+            table = Table(table_name, self.metadata, *columns, schema=self.db_schema)
 
             # Add multi-column unique constraints with table-specific names
             for constraint in schema_unique_constraints:
```
```diff
@@ -205,48 +244,52 @@ class SingleStoreDb(BaseDb):
                 table.append_constraint(Index(idx_name, idx_col))
 
             # Create schema if one is specified
-            if db_schema is not None:
+            if self.create_schema and self.db_schema is not None:
                 with self.Session() as sess, sess.begin():
-                    create_schema(session=sess, db_schema=db_schema)
+                    create_schema(session=sess, db_schema=self.db_schema)
 
             # SingleStore has a limitation on the number of unique multi-field constraints per table.
             # We need to work around that limitation for the sessions table.
-            ...
-            col_sql
-            ...
+            table_created = False
+            if not self.table_exists(table_name):
+                if table_type == "sessions":
+                    with self.Session() as sess, sess.begin():
+                        # Build column definitions
+                        columns_sql = []
+                        for col in table.columns:
+                            col_sql = f"{col.name} {col.type.compile(self.db_engine.dialect)}"
+                            if not col.nullable:
+                                col_sql += " NOT NULL"
+                            columns_sql.append(col_sql)
+
+                        columns_def = ", ".join(columns_sql)
+
+                        # Add shard key and single unique constraint
+                        table_sql = f"""CREATE TABLE IF NOT EXISTS {table_ref} (
+                            {columns_def},
+                            SHARD KEY (session_id),
+                            UNIQUE KEY uq_session_type (session_id, session_type)
+                        )"""
+
+                        sess.execute(text(table_sql))
+                else:
+                    table.create(self.db_engine, checkfirst=True)
+                log_debug(f"Successfully created table '{table_ref}'")
+                table_created = True
             else:
-                ...
+                log_debug(f"Table '{table_ref}' already exists, skipping creation")
 
             # Create indexes
             for idx in table.indexes:
                 try:
-                    log_debug(f"Creating index: {idx.name}")
-
                     # Check if index already exists
                     with self.Session() as sess:
-                        if db_schema is not None:
+                        if self.db_schema is not None:
                             exists_query = text(
                                 "SELECT 1 FROM information_schema.statistics WHERE table_schema = :schema AND index_name = :index_name"
                             )
                             exists = (
-                                sess.execute(exists_query, {"schema": db_schema, "index_name": idx.name}).scalar()
+                                sess.execute(exists_query, {"schema": self.db_schema, "index_name": idx.name}).scalar()
                                 is not None
                             )
                         else:
```
```diff
@@ -260,10 +303,15 @@ class SingleStoreDb(BaseDb):
 
                     idx.create(self.db_engine)
 
+                    log_debug(f"Created index: {idx.name} for table {table_ref}")
                 except Exception as e:
                     log_error(f"Error creating index {idx.name}: {e}")
 
-
+            # Store the schema version for the created table
+            if table_name != self.versions_table_name and table_created:
+                latest_schema_version = MigrationManager(self).latest_schema_version
+                self.upsert_schema_version(table_name=table_name, version=latest_schema_version.public)
+
             return table
 
         except Exception as e:
```
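For the sessions table, the new code sidesteps SingleStore's limit on multi-column unique constraints by emitting raw DDL with a `SHARD KEY` and a single composite `UNIQUE KEY`. A self-contained sketch of the same column-rendering pattern, with a hypothetical two-column table standing in for the real sessions schema:

```python
from sqlalchemy import Column, MetaData, String, Table
from sqlalchemy.dialects import mysql

# Hypothetical, trimmed-down sessions table for illustration only
table = Table(
    "agno_sessions",
    MetaData(),
    Column("session_id", String(128), nullable=False),
    Column("session_type", String(32), nullable=False),
)

# Render each column's DDL for the MySQL-family dialect, as the diff does
columns_sql = []
for col in table.columns:
    col_sql = f"{col.name} {col.type.compile(mysql.dialect())}"
    if not col.nullable:
        col_sql += " NOT NULL"
    columns_sql.append(col_sql)

columns_def = ", ".join(columns_sql)
table_sql = f"""CREATE TABLE IF NOT EXISTS agno_sessions (
    {columns_def},
    SHARD KEY (session_id),
    UNIQUE KEY uq_session_type (session_id, session_type)
)"""
print(table_sql)
```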
```diff
@@ -275,7 +323,6 @@ class SingleStoreDb(BaseDb):
             self.session_table = self._get_or_create_table(
                 table_name=self.session_table_name,
                 table_type="sessions",
-                db_schema=self.db_schema,
                 create_table_if_not_found=create_table_if_not_found,
             )
             return self.session_table
@@ -284,7 +331,6 @@ class SingleStoreDb(BaseDb):
             self.memory_table = self._get_or_create_table(
                 table_name=self.memory_table_name,
                 table_type="memories",
-                db_schema=self.db_schema,
                 create_table_if_not_found=create_table_if_not_found,
             )
             return self.memory_table
@@ -293,7 +339,6 @@ class SingleStoreDb(BaseDb):
             self.metrics_table = self._get_or_create_table(
                 table_name=self.metrics_table_name,
                 table_type="metrics",
-                db_schema=self.db_schema,
                 create_table_if_not_found=create_table_if_not_found,
             )
             return self.metrics_table
@@ -302,7 +347,6 @@ class SingleStoreDb(BaseDb):
             self.eval_table = self._get_or_create_table(
                 table_name=self.eval_table_name,
                 table_type="evals",
-                db_schema=self.db_schema,
                 create_table_if_not_found=create_table_if_not_found,
             )
             return self.eval_table
@@ -311,18 +355,50 @@ class SingleStoreDb(BaseDb):
             self.knowledge_table = self._get_or_create_table(
                 table_name=self.knowledge_table_name,
                 table_type="knowledge",
-                db_schema=self.db_schema,
                 create_table_if_not_found=create_table_if_not_found,
             )
             return self.knowledge_table
 
+        if table_type == "culture":
+            self.culture_table = self._get_or_create_table(
+                table_name=self.culture_table_name,
+                table_type="culture",
+                create_table_if_not_found=create_table_if_not_found,
+            )
+            return self.culture_table
+
+        if table_type == "versions":
+            self.versions_table = self._get_or_create_table(
+                table_name=self.versions_table_name,
+                table_type="versions",
+                create_table_if_not_found=create_table_if_not_found,
+            )
+            return self.versions_table
+
+        if table_type == "traces":
+            self.traces_table = self._get_or_create_table(
+                table_name=self.trace_table_name,
+                table_type="traces",
+                create_table_if_not_found=create_table_if_not_found,
+            )
+            return self.traces_table
+
+        if table_type == "spans":
+            # Ensure traces table exists first (for foreign key)
+            self._get_table(table_type="traces", create_table_if_not_found=create_table_if_not_found)
+            self.spans_table = self._get_or_create_table(
+                table_name=self.span_table_name,
+                table_type="spans",
+                create_table_if_not_found=create_table_if_not_found,
+            )
+            return self.spans_table
+
         raise ValueError(f"Unknown table type: {table_type}")
 
     def _get_or_create_table(
         self,
         table_name: str,
         table_type: str,
-        db_schema: Optional[str],
         create_table_if_not_found: Optional[bool] = False,
     ) -> Optional[Table]:
         """
```
```diff
@@ -331,37 +407,78 @@ class SingleStoreDb(BaseDb):
         Args:
             table_name (str): Name of the table to get or create
             table_type (str): Type of table (used to get schema definition)
-            db_schema (Optional[str]): Database schema name
 
         Returns:
             Table: SQLAlchemy Table object representing the schema.
         """
 
         with self.Session() as sess, sess.begin():
-            table_is_available = is_table_available(session=sess, table_name=table_name, db_schema=db_schema)
+            table_is_available = is_table_available(session=sess, table_name=table_name, db_schema=self.db_schema)
 
         if not table_is_available:
             if not create_table_if_not_found:
                 return None
-
+
+            # Also store the schema version for the created table
+            if table_name != self.versions_table_name:
+                latest_schema_version = MigrationManager(self).latest_schema_version
+                self.upsert_schema_version(table_name=table_name, version=latest_schema_version.public)
+
             return self._create_table(table_name=table_name, table_type=table_type)
 
         if not is_valid_table(
             db_engine=self.db_engine,
             table_name=table_name,
             table_type=table_type,
-            db_schema=db_schema,
+            db_schema=self.db_schema,
         ):
-            table_ref = f"{db_schema}.{table_name}" if db_schema else table_name
+            table_ref = f"{self.db_schema}.{table_name}" if self.db_schema else table_name
             raise ValueError(f"Table {table_ref} has an invalid schema")
 
         try:
-            return self._create_table_structure_only(table_name=table_name, table_type=table_type
+            return self._create_table_structure_only(table_name=table_name, table_type=table_type)
 
         except Exception as e:
-            table_ref = f"{db_schema}.{table_name}" if db_schema else table_name
+            table_ref = f"{self.db_schema}.{table_name}" if self.db_schema else table_name
             log_error(f"Error loading existing table {table_ref}: {e}")
             raise
 
+    def get_latest_schema_version(self, table_name: str) -> str:
+        """Get the latest version of the database schema."""
+        table = self._get_table(table_type="versions", create_table_if_not_found=True)
+        if table is None:
+            return "2.0.0"
+        with self.Session() as sess:
+            stmt = select(table)
+            # Latest version for the given table
+            stmt = stmt.where(table.c.table_name == table_name)
+            stmt = stmt.order_by(table.c.version.desc()).limit(1)
+            result = sess.execute(stmt).fetchone()
+            if result is None:
+                return "2.0.0"
+            version_dict = dict(result._mapping)
+            return version_dict.get("version") or "2.0.0"
+
+    def upsert_schema_version(self, table_name: str, version: str) -> None:
+        """Upsert the schema version into the database."""
+        table = self._get_table(table_type="versions", create_table_if_not_found=True)
+        if table is None:
+            return
+        current_datetime = datetime.now().isoformat()
+        with self.Session() as sess, sess.begin():
+            stmt = mysql.insert(table).values(
+                table_name=table_name,
+                version=version,
+                created_at=current_datetime,  # Store as ISO format string
+                updated_at=current_datetime,
+            )
+            # Update version if table_name already exists
+            stmt = stmt.on_duplicate_key_update(
+                version=version,
+                updated_at=current_datetime,
+            )
+            sess.execute(stmt)
+
     # -- Session methods --
     def delete_session(self, session_id: str) -> bool:
         """
```
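Schema versions are recorded with the MySQL-dialect `INSERT ... ON DUPLICATE KEY UPDATE` construct, which SingleStore also understands: insert a `(table_name, version)` row, and on a key collision bump only `version` and `updated_at`. A standalone sketch of the same pattern; the table definition here is hypothetical (the real one lives in `agno/db/singlestore/schemas.py`):

```python
from datetime import datetime

from sqlalchemy import Column, MetaData, String, Table
from sqlalchemy.dialects import mysql

versions = Table(
    "agno_schema_versions",  # hypothetical name
    MetaData(),
    Column("table_name", String(255), primary_key=True),
    Column("version", String(32)),
    Column("created_at", String(64)),
    Column("updated_at", String(64)),
)

now = datetime.now().isoformat()
stmt = mysql.insert(versions).values(
    table_name="agno_sessions", version="2.3.0", created_at=now, updated_at=now
)
# On duplicate table_name, update only the version and timestamp
stmt = stmt.on_duplicate_key_update(version="2.3.0", updated_at=now)

# Render the statement without connecting (requires only SQLAlchemy):
print(stmt.compile(dialect=mysql.dialect()))
```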
```diff
@@ -454,9 +571,6 @@ class SingleStoreDb(BaseDb):
 
             if user_id is not None:
                 stmt = stmt.where(table.c.user_id == user_id)
-            if session_type is not None:
-                session_type_value = session_type.value if isinstance(session_type, SessionType) else session_type
-                stmt = stmt.where(table.c.session_type == session_type_value)
             result = sess.execute(stmt).fetchone()
             if result is None:
                 return None
@@ -797,7 +911,7 @@ class SingleStoreDb(BaseDb):
             raise e
 
     def upsert_sessions(
-        self, sessions: List[Session], deserialize: Optional[bool] = True
+        self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[Session, Dict[str, Any]]]:
         """
         Bulk upsert multiple sessions for improved performance on large datasets.
@@ -841,6 +955,8 @@ class SingleStoreDb(BaseDb):
             agent_data = []
             for session in agent_sessions:
                 session_dict = session.to_dict()
+                # Use preserved updated_at if flag is set, otherwise use current time
+                updated_at = session_dict.get("updated_at") if preserve_updated_at else int(time.time())
                 agent_data.append(
                     {
                         "session_id": session_dict.get("session_id"),
@@ -853,7 +969,7 @@
                         "summary": session_dict.get("summary"),
                         "metadata": session_dict.get("metadata"),
                         "created_at": session_dict.get("created_at"),
-                        "updated_at":
+                        "updated_at": updated_at,
                     }
                 )
 
@@ -867,7 +983,7 @@
                 summary=stmt.inserted.summary,
                 metadata=stmt.inserted.metadata,
                 runs=stmt.inserted.runs,
-                updated_at=
+                updated_at=stmt.inserted.updated_at,
             )
             sess.execute(stmt, agent_data)
 
@@ -890,6 +1006,8 @@
             team_data = []
             for session in team_sessions:
                 session_dict = session.to_dict()
+                # Use preserved updated_at if flag is set, otherwise use current time
+                updated_at = session_dict.get("updated_at") if preserve_updated_at else int(time.time())
                 team_data.append(
                     {
                         "session_id": session_dict.get("session_id"),
@@ -902,7 +1020,7 @@
                         "summary": session_dict.get("summary"),
                         "metadata": session_dict.get("metadata"),
                         "created_at": session_dict.get("created_at"),
-                        "updated_at":
+                        "updated_at": updated_at,
                     }
                 )
 
@@ -916,7 +1034,7 @@
                 summary=stmt.inserted.summary,
                 metadata=stmt.inserted.metadata,
                 runs=stmt.inserted.runs,
-                updated_at=
+                updated_at=stmt.inserted.updated_at,
             )
             sess.execute(stmt, team_data)
 
@@ -939,6 +1057,8 @@
             workflow_data = []
             for session in workflow_sessions:
                 session_dict = session.to_dict()
+                # Use preserved updated_at if flag is set, otherwise use current time
+                updated_at = session_dict.get("updated_at") if preserve_updated_at else int(time.time())
                 workflow_data.append(
                     {
                         "session_id": session_dict.get("session_id"),
@@ -951,7 +1071,7 @@
                         "summary": session_dict.get("summary"),
                         "metadata": session_dict.get("metadata"),
                         "created_at": session_dict.get("created_at"),
-                        "updated_at":
+                        "updated_at": updated_at,
                     }
                 )
 
@@ -965,7 +1085,7 @@
                 summary=stmt.inserted.summary,
                 metadata=stmt.inserted.metadata,
                 runs=stmt.inserted.runs,
-                updated_at=
+                updated_at=stmt.inserted.updated_at,
             )
             sess.execute(stmt, workflow_data)
 
```
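Both `upsert_sessions` above and `upsert_memories` below gain a `preserve_updated_at` flag: when set, the `updated_at` carried by the objects is written as-is instead of being stamped with `int(time.time())`, which matters for restores and migrations. A hedged usage sketch, assuming a configured `db` and a list of `sessions`:

```python
# Migration/restore path: keep the original timestamps on the rows
restored = db.upsert_sessions(sessions, preserve_updated_at=True)

# Normal write path: the default behavior stamps the current time
saved = db.upsert_sessions(sessions)
```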
```diff
@@ -1205,13 +1325,14 @@
             raise e
 
     def get_user_memory_stats(
-        self, limit: Optional[int] = None, page: Optional[int] = None
+        self, limit: Optional[int] = None, page: Optional[int] = None, user_id: Optional[str] = None
     ) -> Tuple[List[Dict[str, Any]], int]:
         """Get user memories stats.
 
         Args:
             limit (Optional[int]): The maximum number of user stats to return.
             page (Optional[int]): The page number.
+            user_id (Optional[str]): User ID for filtering.
 
         Returns:
             Tuple[List[Dict[str, Any]], int]: A list of dictionaries containing user stats and total count.
@@ -1234,16 +1355,17 @@
                 return [], 0
 
             with self.Session() as sess, sess.begin():
-                stmt = (
-                    ...
-                    func.max(table.c.updated_at).label("last_memory_updated_at"),
-                    )
-                    .where(table.c.user_id.is_not(None))
-                    .group_by(table.c.user_id)
-                    .order_by(func.max(table.c.updated_at).desc())
+                stmt = select(
+                    table.c.user_id,
+                    func.count(table.c.memory_id).label("total_memories"),
+                    func.max(table.c.updated_at).label("last_memory_updated_at"),
                 )
+                if user_id is not None:
+                    stmt = stmt.where(table.c.user_id == user_id)
+                else:
+                    stmt = stmt.where(table.c.user_id.is_not(None))
+                stmt = stmt.group_by(table.c.user_id)
+                stmt = stmt.order_by(func.max(table.c.updated_at).desc())
 
                 count_stmt = select(func.count()).select_from(stmt.alias())
                 total_count = sess.execute(count_stmt).scalar()
@@ -1297,6 +1419,8 @@
             if memory.memory_id is None:
                 memory.memory_id = str(uuid4())
 
+            current_time = int(time.time())
+
             stmt = mysql.insert(table).values(
                 memory_id=memory.memory_id,
                 memory=memory.memory,
@@ -1305,7 +1429,9 @@
                 agent_id=memory.agent_id,
                 team_id=memory.team_id,
                 topics=memory.topics,
-
+                feedback=memory.feedback,
+                created_at=memory.created_at,
+                updated_at=current_time,
             )
             stmt = stmt.on_duplicate_key_update(
                 memory=stmt.inserted.memory,
@@ -1314,7 +1440,10 @@
                 user_id=stmt.inserted.user_id,
                 agent_id=stmt.inserted.agent_id,
                 team_id=stmt.inserted.team_id,
-
+                feedback=stmt.inserted.feedback,
+                updated_at=stmt.inserted.updated_at,
+                # Preserve created_at on update - don't overwrite existing value
+                created_at=table.c.created_at,
             )
 
             sess.execute(stmt)
@@ -1336,7 +1465,7 @@
             raise e
 
     def upsert_memories(
-        self, memories: List[UserMemory], deserialize: Optional[bool] = True
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
     ) -> List[Union[UserMemory, Dict[str, Any]]]:
         """
         Bulk upsert multiple user memories for improved performance on large datasets.
@@ -1361,9 +1490,14 @@
 
             # Prepare data for bulk insert
             memory_data = []
+            current_time = int(time.time())
+
             for memory in memories:
                 if memory.memory_id is None:
                     memory.memory_id = str(uuid4())
+                # Use preserved updated_at if flag is set, otherwise use current time
+                updated_at = memory.updated_at if preserve_updated_at else current_time
+
                 memory_data.append(
                     {
                         "memory_id": memory.memory_id,
@@ -1373,7 +1507,9 @@
                         "agent_id": memory.agent_id,
                         "team_id": memory.team_id,
                         "topics": memory.topics,
-                        "
+                        "feedback": memory.feedback,
+                        "created_at": memory.created_at,
+                        "updated_at": updated_at,
                     }
                 )
 
@@ -1389,7 +1525,10 @@
                 user_id=stmt.inserted.user_id,
                 agent_id=stmt.inserted.agent_id,
                 team_id=stmt.inserted.team_id,
-
+                feedback=stmt.inserted.feedback,
+                updated_at=stmt.inserted.updated_at,
+                # Preserve created_at on update
+                created_at=table.c.created_at,
             )
             sess.execute(stmt, memory_data)
 
```
@@ -2009,3 +2148,730 @@ class SingleStoreDb(BaseDb):
|
|
|
2009
2148
|
except Exception as e:
|
|
2010
2149
|
log_error(f"Error renaming eval run {eval_run_id}: {e}")
|
|
2011
2150
|
raise e
|
|
2151
|
+
|
|
2152
|
+
# -- Culture methods --
|
|
2153
|
+
|
|
2154
|
+
def clear_cultural_knowledge(self) -> None:
|
|
2155
|
+
"""Delete all cultural knowledge from the database.
|
|
2156
|
+
|
|
2157
|
+
Raises:
|
|
2158
|
+
Exception: If an error occurs during deletion.
|
|
2159
|
+
"""
|
|
2160
|
+
try:
|
|
2161
|
+
table = self._get_table(table_type="culture")
|
|
2162
|
+
if table is None:
|
|
2163
|
+
return
|
|
2164
|
+
|
|
2165
|
+
with self.Session() as sess, sess.begin():
|
|
2166
|
+
sess.execute(table.delete())
|
|
2167
|
+
|
|
2168
|
+
except Exception as e:
|
|
2169
|
+
log_warning(f"Exception deleting all cultural knowledge: {e}")
|
|
2170
|
+
raise e
|
|
2171
|
+
|
|
2172
|
+
def delete_cultural_knowledge(self, id: str) -> None:
|
|
2173
|
+
"""Delete a cultural knowledge entry from the database.
|
|
2174
|
+
|
|
2175
|
+
Args:
|
|
2176
|
+
id (str): The ID of the cultural knowledge to delete.
|
|
2177
|
+
|
|
2178
|
+
Raises:
|
|
2179
|
+
Exception: If an error occurs during deletion.
|
|
2180
|
+
"""
|
|
2181
|
+
try:
|
|
2182
|
+
table = self._get_table(table_type="culture")
|
|
2183
|
+
if table is None:
|
|
2184
|
+
return
|
|
2185
|
+
|
|
2186
|
+
with self.Session() as sess, sess.begin():
|
|
2187
|
+
delete_stmt = table.delete().where(table.c.id == id)
|
|
2188
|
+
result = sess.execute(delete_stmt)
|
|
2189
|
+
|
|
2190
|
+
success = result.rowcount > 0
|
|
2191
|
+
if success:
|
|
2192
|
+
log_debug(f"Successfully deleted cultural knowledge id: {id}")
|
|
2193
|
+
else:
|
|
2194
|
+
log_debug(f"No cultural knowledge found with id: {id}")
|
|
2195
|
+
|
|
2196
|
+
except Exception as e:
|
|
2197
|
+
log_error(f"Error deleting cultural knowledge: {e}")
|
|
2198
|
+
raise e
|
|
2199
|
+
|
|
2200
|
+
def get_cultural_knowledge(
|
|
2201
|
+
self, id: str, deserialize: Optional[bool] = True
|
|
2202
|
+
) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
|
|
2203
|
+
"""Get a cultural knowledge entry from the database.
|
|
2204
|
+
|
|
2205
|
+
Args:
|
|
2206
|
+
id (str): The ID of the cultural knowledge to get.
|
|
2207
|
+
deserialize (Optional[bool]): Whether to deserialize the cultural knowledge. Defaults to True.
|
|
2208
|
+
|
|
2209
|
+
Returns:
|
|
2210
|
+
Optional[Union[CulturalKnowledge, Dict[str, Any]]]: The cultural knowledge entry, or None if it doesn't exist.
|
|
2211
|
+
|
|
2212
|
+
Raises:
|
|
2213
|
+
Exception: If an error occurs during retrieval.
|
|
2214
|
+
"""
|
|
2215
|
+
try:
|
|
2216
|
+
table = self._get_table(table_type="culture")
|
|
2217
|
+
if table is None:
|
|
2218
|
+
return None
|
|
2219
|
+
|
|
2220
|
+
with self.Session() as sess, sess.begin():
|
|
2221
|
+
stmt = select(table).where(table.c.id == id)
|
|
2222
|
+
result = sess.execute(stmt).fetchone()
|
|
2223
|
+
if result is None:
|
|
2224
|
+
return None
|
|
2225
|
+
|
|
2226
|
+
db_row = dict(result._mapping)
|
|
2227
|
+
if not db_row or not deserialize:
|
|
2228
|
+
return db_row
|
|
2229
|
+
|
|
2230
|
+
return deserialize_cultural_knowledge_from_db(db_row)
|
|
2231
|
+
|
|
2232
|
+
except Exception as e:
|
|
2233
|
+
log_error(f"Exception reading from cultural knowledge table: {e}")
|
|
2234
|
+
raise e
|
|
2235
|
+
|
|
2236
|
+
def get_all_cultural_knowledge(
|
|
2237
|
+
self,
|
|
2238
|
+
name: Optional[str] = None,
|
|
2239
|
+
agent_id: Optional[str] = None,
|
|
2240
|
+
team_id: Optional[str] = None,
|
|
2241
|
+
limit: Optional[int] = None,
|
|
2242
|
+
page: Optional[int] = None,
|
|
2243
|
+
sort_by: Optional[str] = None,
|
|
2244
|
+
sort_order: Optional[str] = None,
|
|
2245
|
+
deserialize: Optional[bool] = True,
|
|
2246
|
+
) -> Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
|
|
2247
|
+
"""Get all cultural knowledge from the database as CulturalKnowledge objects.
|
|
2248
|
+
|
|
2249
|
+
Args:
|
|
2250
|
+
name (Optional[str]): The name of the cultural knowledge to filter by.
|
|
2251
|
+
agent_id (Optional[str]): The ID of the agent to filter by.
|
|
2252
|
+
team_id (Optional[str]): The ID of the team to filter by.
|
|
2253
|
+
limit (Optional[int]): The maximum number of cultural knowledge entries to return.
|
|
2254
|
+
page (Optional[int]): The page number.
|
|
2255
|
+
sort_by (Optional[str]): The column to sort by.
|
|
2256
|
+
sort_order (Optional[str]): The order to sort by.
|
|
2257
|
+
deserialize (Optional[bool]): Whether to deserialize the cultural knowledge. Defaults to True.
|
|
2258
|
+
|
|
2259
|
+
Returns:
|
|
2260
|
+
Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
|
|
2261
|
+
- When deserialize=True: List of CulturalKnowledge objects
|
|
2262
|
+
- When deserialize=False: List of CulturalKnowledge dictionaries and total count
|
|
2263
|
+
|
|
2264
|
+
Raises:
|
|
2265
|
+
Exception: If an error occurs during retrieval.
|
|
2266
|
+
"""
|
|
2267
|
+
try:
|
|
2268
|
+
table = self._get_table(table_type="culture")
|
|
2269
|
+
if table is None:
|
|
2270
|
+
return [] if deserialize else ([], 0)
|
|
2271
|
+
|
|
2272
|
+
with self.Session() as sess, sess.begin():
|
|
2273
|
+
stmt = select(table)
|
|
2274
|
+
|
|
2275
|
+
# Filtering
|
|
2276
|
+
if name is not None:
|
|
2277
|
+
stmt = stmt.where(table.c.name == name)
|
|
2278
|
+
if agent_id is not None:
|
|
2279
|
+
stmt = stmt.where(table.c.agent_id == agent_id)
|
|
2280
|
+
if team_id is not None:
|
|
2281
|
+
stmt = stmt.where(table.c.team_id == team_id)
|
|
2282
|
+
|
|
2283
|
+
# Get total count after applying filtering
|
|
2284
|
+
count_stmt = select(func.count()).select_from(stmt.alias())
|
|
2285
|
+
total_count = sess.execute(count_stmt).scalar()
|
|
2286
|
+
|
|
2287
|
+
# Sorting
|
|
2288
|
+
stmt = apply_sorting(stmt, table, sort_by, sort_order)
|
|
2289
|
+
# Paginating
|
|
2290
|
+
if limit is not None:
|
|
2291
|
+
stmt = stmt.limit(limit)
|
|
2292
|
+
if page is not None:
|
|
2293
|
+
stmt = stmt.offset((page - 1) * limit)
|
|
2294
|
+
|
|
2295
|
+
result = sess.execute(stmt).fetchall()
|
|
2296
|
+
if not result:
|
|
2297
|
+
return [] if deserialize else ([], 0)
|
|
2298
|
+
|
|
2299
|
+
db_rows = [dict(record._mapping) for record in result]
|
|
2300
|
+
|
|
2301
|
+
if not deserialize:
|
|
2302
|
+
return db_rows, total_count
|
|
2303
|
+
|
|
2304
|
+
return [deserialize_cultural_knowledge_from_db(row) for row in db_rows]
|
|
2305
|
+
|
|
2306
|
+
except Exception as e:
|
|
2307
|
+
log_error(f"Error reading from cultural knowledge table: {e}")
|
|
2308
|
+
raise e
|
|
2309
|
+
|
|
2310
|
+
def upsert_cultural_knowledge(
|
|
2311
|
+
self, cultural_knowledge: CulturalKnowledge, deserialize: Optional[bool] = True
|
|
2312
|
+
) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
|
|
2313
|
+
"""Upsert a cultural knowledge entry into the database.
|
|
2314
|
+
|
|
2315
|
+
Args:
|
|
2316
|
+
cultural_knowledge (CulturalKnowledge): The cultural knowledge to upsert.
|
|
2317
|
+
deserialize (Optional[bool]): Whether to deserialize the cultural knowledge. Defaults to True.
|
|
2318
|
+
|
|
2319
|
+
Returns:
|
|
2320
|
+
Optional[CulturalKnowledge]: The upserted cultural knowledge entry.
|
|
2321
|
+
|
|
2322
|
+
Raises:
|
|
2323
|
+
Exception: If an error occurs during upsert.
|
|
2324
|
+
"""
|
|
2325
|
+
try:
|
|
2326
|
+
table = self._get_table(table_type="culture", create_table_if_not_found=True)
|
|
2327
|
+
if table is None:
|
|
2328
|
+
return None
|
|
2329
|
+
|
|
2330
|
+
if cultural_knowledge.id is None:
|
|
2331
|
+
cultural_knowledge.id = str(uuid4())
|
|
2332
|
+
|
|
2333
|
+
# Serialize content, categories, and notes into a JSON dict for DB storage
|
|
2334
|
+
content_dict = serialize_cultural_knowledge_for_db(cultural_knowledge)
|
|
2335
|
+
|
|
2336
|
+
with self.Session() as sess, sess.begin():
|
|
2337
|
+
stmt = mysql.insert(table).values(
|
|
2338
|
+
id=cultural_knowledge.id,
|
|
2339
|
+
name=cultural_knowledge.name,
|
|
2340
|
+
summary=cultural_knowledge.summary,
|
|
2341
|
+
content=content_dict if content_dict else None,
|
|
2342
|
+
metadata=cultural_knowledge.metadata,
|
|
2343
|
+
input=cultural_knowledge.input,
|
|
2344
|
+
created_at=cultural_knowledge.created_at,
|
|
2345
|
+
updated_at=int(time.time()),
|
|
2346
|
+
agent_id=cultural_knowledge.agent_id,
|
|
2347
|
+
team_id=cultural_knowledge.team_id,
|
|
2348
|
+
)
|
|
2349
|
+
stmt = stmt.on_duplicate_key_update(
|
|
2350
|
+
name=cultural_knowledge.name,
|
|
2351
|
+
summary=cultural_knowledge.summary,
|
|
2352
|
+
content=content_dict if content_dict else None,
|
|
2353
|
+
metadata=cultural_knowledge.metadata,
|
|
2354
|
+
input=cultural_knowledge.input,
|
|
2355
|
+
updated_at=int(time.time()),
|
|
2356
|
+
agent_id=cultural_knowledge.agent_id,
|
|
2357
|
+
team_id=cultural_knowledge.team_id,
|
|
2358
|
+
)
|
|
2359
|
+
sess.execute(stmt)
|
|
2360
|
+
|
|
2361
|
+
# Fetch the inserted/updated row
|
|
2362
|
+
return self.get_cultural_knowledge(id=cultural_knowledge.id, deserialize=deserialize)
|
|
2363
|
+
|
|
2364
|
+
except Exception as e:
|
|
2365
|
+
log_error(f"Error upserting cultural knowledge: {e}")
|
|
2366
|
+
raise e
|
|
2367
|
+
|
|
2368
|
+
# --- Traces ---
|
|
2369
|
+
def _get_traces_base_query(self, table: Table, spans_table: Optional[Table] = None):
|
|
2370
|
+
"""Build base query for traces with aggregated span counts.
|
|
2371
|
+
|
|
2372
|
+
Args:
|
|
2373
|
+
table: The traces table.
|
|
2374
|
+
spans_table: The spans table (optional).
|
|
2375
|
+
|
|
2376
|
+
Returns:
|
|
2377
|
+
SQLAlchemy select statement with total_spans and error_count calculated dynamically.
|
|
2378
|
+
"""
|
|
2379
|
+
from sqlalchemy import case, literal
|
|
2380
|
+
|
|
2381
|
+
if spans_table is not None:
|
|
2382
|
+
# JOIN with spans table to calculate total_spans and error_count
|
|
2383
|
+
return (
|
|
2384
|
+
select(
|
|
2385
|
+
table,
|
|
2386
|
+
func.coalesce(func.count(spans_table.c.span_id), 0).label("total_spans"),
|
|
2387
|
+
func.coalesce(func.sum(case((spans_table.c.status_code == "ERROR", 1), else_=0)), 0).label(
|
|
2388
|
+
"error_count"
|
|
2389
|
+
),
|
|
2390
|
+
)
|
|
2391
|
+
.select_from(table.outerjoin(spans_table, table.c.trace_id == spans_table.c.trace_id))
|
|
2392
|
+
.group_by(table.c.trace_id)
|
|
2393
|
+
)
|
|
2394
|
+
else:
|
|
2395
|
+
# Fallback if spans table doesn't exist
|
|
2396
|
+
return select(table, literal(0).label("total_spans"), literal(0).label("error_count"))
|
|
2397
|
+
|
|
2398
|
+
def _get_trace_component_level_expr(self, workflow_id_col, team_id_col, agent_id_col, name_col):
|
|
2399
|
+
"""Build a SQL CASE expression that returns the component level for a trace.
|
|
2400
|
+
|
|
2401
|
+
Component levels (higher = more important):
|
|
2402
|
+
- 3: Workflow root (.run or .arun with workflow_id)
|
|
2403
|
+
- 2: Team root (.run or .arun with team_id)
|
|
2404
|
+
- 1: Agent root (.run or .arun with agent_id)
|
|
2405
|
+
- 0: Child span (not a root)
|
|
2406
|
+
|
|
2407
|
+
Args:
|
|
2408
|
+
workflow_id_col: SQL column/expression for workflow_id
|
|
2409
|
+
team_id_col: SQL column/expression for team_id
|
|
2410
|
+
agent_id_col: SQL column/expression for agent_id
|
|
2411
|
+
name_col: SQL column/expression for name
|
|
2412
|
+
|
|
2413
|
+
Returns:
|
|
2414
|
+
SQLAlchemy CASE expression returning the component level as an integer.
|
|
2415
|
+
"""
|
|
2416
|
+
from sqlalchemy import case, or_
|
|
2417
|
+
|
|
2418
|
+
is_root_name = or_(name_col.like("%.run%"), name_col.like("%.arun%"))
|
|
2419
|
+
|
|
2420
|
+
return case(
|
|
2421
|
+
# Workflow root (level 3)
|
|
2422
|
+
(and_(workflow_id_col.isnot(None), is_root_name), 3),
|
|
2423
|
+
# Team root (level 2)
|
|
2424
|
+
(and_(team_id_col.isnot(None), is_root_name), 2),
|
|
2425
|
+
# Agent root (level 1)
|
|
2426
|
+
(and_(agent_id_col.isnot(None), is_root_name), 1),
|
|
2427
|
+
# Child span or unknown (level 0)
|
|
2428
|
+
else_=0,
|
|
2429
|
+
)
|
|
2430
|
+
|
|
2431
|
+
def upsert_trace(self, trace: "Trace") -> None:
    """Create or update a single trace record in the database.

    Uses INSERT ... ON DUPLICATE KEY UPDATE (upsert) to handle concurrent inserts
    atomically and avoid race conditions.

    Args:
        trace: The Trace object to store (one per trace_id).
    """
    from sqlalchemy import case

    try:
        table = self._get_table(table_type="traces", create_table_if_not_found=True)
        if table is None:
            return

        trace_dict = trace.to_dict()
        trace_dict.pop("total_spans", None)
        trace_dict.pop("error_count", None)

        with self.Session() as sess, sess.begin():
            # Use upsert to handle concurrent inserts atomically.
            # On conflict, update fields while preserving existing non-null context values
            # and keeping the earliest start_time.
            insert_stmt = mysql.insert(table).values(trace_dict)

            # Build component level expressions for comparing trace priority
            new_level = self._get_trace_component_level_expr(
                insert_stmt.inserted.workflow_id,
                insert_stmt.inserted.team_id,
                insert_stmt.inserted.agent_id,
                insert_stmt.inserted.name,
            )
            existing_level = self._get_trace_component_level_expr(
                table.c.workflow_id,
                table.c.team_id,
                table.c.agent_id,
                table.c.name,
            )

            # Build the ON DUPLICATE KEY UPDATE clause.
            # Use LEAST for start_time and GREATEST for end_time to capture the full trace duration.
            # Duration is calculated with TIMESTAMPDIFF in microseconds, then converted to ms.
            upsert_stmt = insert_stmt.on_duplicate_key_update(
                end_time=func.greatest(table.c.end_time, insert_stmt.inserted.end_time),
                start_time=func.least(table.c.start_time, insert_stmt.inserted.start_time),
                # Calculate duration in milliseconds using TIMESTAMPDIFF:
                # TIMESTAMPDIFF(MICROSECOND, start, end) / 1000 gives milliseconds
                duration_ms=func.timestampdiff(
                    text("MICROSECOND"),
                    func.least(table.c.start_time, insert_stmt.inserted.start_time),
                    func.greatest(table.c.end_time, insert_stmt.inserted.end_time),
                )
                / 1000,
                status=insert_stmt.inserted.status,
                # Update name only if the new trace is from a higher-level component.
                # Priority: workflow (3) > team (2) > agent (1) > child spans (0)
                name=case(
                    (new_level > existing_level, insert_stmt.inserted.name),
                    else_=table.c.name,
                ),
                # Preserve existing non-null context values using COALESCE
                run_id=func.coalesce(insert_stmt.inserted.run_id, table.c.run_id),
                session_id=func.coalesce(insert_stmt.inserted.session_id, table.c.session_id),
                user_id=func.coalesce(insert_stmt.inserted.user_id, table.c.user_id),
                agent_id=func.coalesce(insert_stmt.inserted.agent_id, table.c.agent_id),
                team_id=func.coalesce(insert_stmt.inserted.team_id, table.c.team_id),
                workflow_id=func.coalesce(insert_stmt.inserted.workflow_id, table.c.workflow_id),
            )
            sess.execute(upsert_stmt)

    except Exception as e:
        log_error(f"Error creating trace: {e}")
        # Don't raise - tracing should not break the main application flow

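# A minimal, self-contained sketch of the MySQL upsert pattern used above, with
# a hypothetical demo table: COALESCE keeps existing non-null context values,
# while LEAST/GREATEST widen the time window regardless of write order.
from datetime import datetime, timedelta

from sqlalchemy import Column, DateTime, MetaData, String, Table, func
from sqlalchemy.dialects import mysql as mysql_dialect

upsert_demo = Table(
    "traces_demo",  # hypothetical table, not the real traces schema
    MetaData(),
    Column("trace_id", String(64), primary_key=True),
    Column("run_id", String(64)),
    Column("start_time", DateTime),
    Column("end_time", DateTime),
)
now = datetime.now()
ins = mysql_dialect.insert(upsert_demo).values(
    trace_id="tr_1", run_id=None, start_time=now, end_time=now + timedelta(seconds=1)
)
stmt = ins.on_duplicate_key_update(
    run_id=func.coalesce(ins.inserted.run_id, upsert_demo.c.run_id),
    start_time=func.least(upsert_demo.c.start_time, ins.inserted.start_time),
    end_time=func.greatest(upsert_demo.c.end_time, ins.inserted.end_time),
)
# Compiles to: INSERT ... ON DUPLICATE KEY UPDATE
#   run_id = COALESCE(VALUES(run_id), traces_demo.run_id), ...
print(stmt.compile(dialect=mysql_dialect.dialect()))
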
def get_trace(
    self,
    trace_id: Optional[str] = None,
    run_id: Optional[str] = None,
):
    """Get a single trace by trace_id or run_id.

    Args:
        trace_id: The unique trace identifier.
        run_id: Filter by run ID (returns the most recent match).

    Returns:
        Optional[Trace]: The trace if found, None otherwise.

    Note:
        If both filters are provided, trace_id takes precedence.
        For run_id, the most recent trace is returned.
    """
    try:
        from agno.tracing.schemas import Trace

        table = self._get_table(table_type="traces")
        if table is None:
            return None

        # Get spans table for JOIN
        spans_table = self._get_table(table_type="spans")

        with self.Session() as sess:
            # Build query with aggregated span counts
            stmt = self._get_traces_base_query(table, spans_table)

            if trace_id:
                stmt = stmt.where(table.c.trace_id == trace_id)
            elif run_id:
                stmt = stmt.where(table.c.run_id == run_id)
            else:
                log_debug("get_trace called without any filter parameters")
                return None

            # Order by most recent and get the first result
            stmt = stmt.order_by(table.c.start_time.desc()).limit(1)
            result = sess.execute(stmt).fetchone()

            if result:
                return Trace.from_dict(dict(result._mapping))
            return None

    except Exception as e:
        log_error(f"Error getting trace: {e}")
        return None

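# Usage sketch (hypothetical IDs): trace_id wins when both filters are passed,
# and a run_id lookup returns the most recent matching trace because results
# are ordered by start_time descending before LIMIT 1 is applied.
#
#     trace = db.get_trace(trace_id="tr_1")
#     latest_for_run = db.get_trace(run_id="run_42")
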
def get_traces(
    self,
    run_id: Optional[str] = None,
    session_id: Optional[str] = None,
    user_id: Optional[str] = None,
    agent_id: Optional[str] = None,
    team_id: Optional[str] = None,
    workflow_id: Optional[str] = None,
    status: Optional[str] = None,
    start_time: Optional[datetime] = None,
    end_time: Optional[datetime] = None,
    limit: Optional[int] = 20,
    page: Optional[int] = 1,
) -> tuple[List, int]:
    """Get traces matching the provided filters.

    Args:
        run_id: Filter by run ID.
        session_id: Filter by session ID.
        user_id: Filter by user ID.
        agent_id: Filter by agent ID.
        team_id: Filter by team ID.
        workflow_id: Filter by workflow ID.
        status: Filter by status (OK, ERROR, UNSET).
        start_time: Filter traces starting at or after this datetime.
        end_time: Filter traces ending at or before this datetime.
        limit: Maximum number of traces to return per page.
        page: Page number (1-indexed).

    Returns:
        tuple[List[Trace], int]: Tuple of (list of matching traces, total count).
    """
    try:
        from agno.tracing.schemas import Trace

        log_debug(
            f"get_traces called with filters: run_id={run_id}, session_id={session_id}, user_id={user_id}, agent_id={agent_id}, page={page}, limit={limit}"
        )

        table = self._get_table(table_type="traces")
        if table is None:
            log_debug("Traces table not found")
            return [], 0

        # Get spans table for JOIN
        spans_table = self._get_table(table_type="spans")

        with self.Session() as sess:
            # Build base query with aggregated span counts
            base_stmt = self._get_traces_base_query(table, spans_table)

            # Apply filters
            if run_id:
                base_stmt = base_stmt.where(table.c.run_id == run_id)
            if session_id:
                base_stmt = base_stmt.where(table.c.session_id == session_id)
            if user_id:
                base_stmt = base_stmt.where(table.c.user_id == user_id)
            if agent_id:
                base_stmt = base_stmt.where(table.c.agent_id == agent_id)
            if team_id:
                base_stmt = base_stmt.where(table.c.team_id == team_id)
            if workflow_id:
                base_stmt = base_stmt.where(table.c.workflow_id == workflow_id)
            if status:
                base_stmt = base_stmt.where(table.c.status == status)
            if start_time:
                # Convert datetime to ISO string for comparison
                base_stmt = base_stmt.where(table.c.start_time >= start_time.isoformat())
            if end_time:
                # Convert datetime to ISO string for comparison
                base_stmt = base_stmt.where(table.c.end_time <= end_time.isoformat())

            # Get total count
            count_stmt = select(func.count()).select_from(base_stmt.alias())
            total_count = sess.execute(count_stmt).scalar() or 0

            # Apply pagination
            offset = (page - 1) * limit if page and limit else 0
            paginated_stmt = base_stmt.order_by(table.c.start_time.desc()).limit(limit).offset(offset)

            results = sess.execute(paginated_stmt).fetchall()

            traces = [Trace.from_dict(dict(row._mapping)) for row in results]
            return traces, total_count

    except Exception as e:
        log_error(f"Error getting traces: {e}")
        return [], 0

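# Pagination sketch: page is 1-indexed, so the query offset is (page - 1) * limit.
# With limit=20 and page=3 the query scans rows 40..59, most recent first.
#
#     traces, total = db.get_traces(agent_id="agent_1", limit=20, page=3)
#     num_pages = (total + 20 - 1) // 20  # ceil-divide the total count into pages
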
def get_trace_stats(
    self,
    user_id: Optional[str] = None,
    agent_id: Optional[str] = None,
    team_id: Optional[str] = None,
    workflow_id: Optional[str] = None,
    start_time: Optional[datetime] = None,
    end_time: Optional[datetime] = None,
    limit: Optional[int] = 20,
    page: Optional[int] = 1,
) -> tuple[List[Dict[str, Any]], int]:
    """Get trace statistics grouped by session.

    Args:
        user_id: Filter by user ID.
        agent_id: Filter by agent ID.
        team_id: Filter by team ID.
        workflow_id: Filter by workflow ID.
        start_time: Filter sessions with traces created at or after this datetime.
        end_time: Filter sessions with traces created at or before this datetime.
        limit: Maximum number of sessions to return per page.
        page: Page number (1-indexed).

    Returns:
        tuple[List[Dict], int]: Tuple of (list of session stats dicts, total count).
        Each dict contains: session_id, user_id, agent_id, team_id, workflow_id,
        total_traces, first_trace_at, last_trace_at.
    """
    try:
        log_debug(
            f"get_trace_stats called with filters: user_id={user_id}, agent_id={agent_id}, "
            f"workflow_id={workflow_id}, team_id={team_id}, "
            f"start_time={start_time}, end_time={end_time}, page={page}, limit={limit}"
        )

        table = self._get_table(table_type="traces")
        if table is None:
            log_debug("Traces table not found")
            return [], 0

        with self.Session() as sess:
            # Build base query grouped by session_id
            base_stmt = (
                select(
                    table.c.session_id,
                    table.c.user_id,
                    table.c.agent_id,
                    table.c.team_id,
                    table.c.workflow_id,
                    func.count(table.c.trace_id).label("total_traces"),
                    func.min(table.c.created_at).label("first_trace_at"),
                    func.max(table.c.created_at).label("last_trace_at"),
                )
                .where(table.c.session_id.isnot(None))  # Only sessions with a session_id
                .group_by(
                    table.c.session_id, table.c.user_id, table.c.agent_id, table.c.team_id, table.c.workflow_id
                )
            )

            # Apply filters
            if user_id:
                base_stmt = base_stmt.where(table.c.user_id == user_id)
            if workflow_id:
                base_stmt = base_stmt.where(table.c.workflow_id == workflow_id)
            if team_id:
                base_stmt = base_stmt.where(table.c.team_id == team_id)
            if agent_id:
                base_stmt = base_stmt.where(table.c.agent_id == agent_id)
            if start_time:
                # Convert datetime to ISO string for comparison
                base_stmt = base_stmt.where(table.c.created_at >= start_time.isoformat())
            if end_time:
                # Convert datetime to ISO string for comparison
                base_stmt = base_stmt.where(table.c.created_at <= end_time.isoformat())

            # Get total count of sessions
            count_stmt = select(func.count()).select_from(base_stmt.alias())
            total_count = sess.execute(count_stmt).scalar() or 0
            log_debug(f"Total matching sessions: {total_count}")

            # Apply pagination and ordering
            offset = (page - 1) * limit if page and limit else 0
            paginated_stmt = base_stmt.order_by(func.max(table.c.created_at).desc()).limit(limit).offset(offset)

            results = sess.execute(paginated_stmt).fetchall()
            log_debug(f"Returning page {page} with {len(results)} session stats")

            # Convert to a list of dicts with datetime objects
            stats_list = []
            for row in results:
                # Convert ISO strings to datetime objects
                first_trace_at_str = row.first_trace_at
                last_trace_at_str = row.last_trace_at

                # Parse ISO format strings to datetime objects (handle None values)
                first_trace_at = None
                last_trace_at = None
                if first_trace_at_str is not None:
                    first_trace_at = datetime.fromisoformat(first_trace_at_str.replace("Z", "+00:00"))
                if last_trace_at_str is not None:
                    last_trace_at = datetime.fromisoformat(last_trace_at_str.replace("Z", "+00:00"))

                stats_list.append(
                    {
                        "session_id": row.session_id,
                        "user_id": row.user_id,
                        "agent_id": row.agent_id,
                        "team_id": row.team_id,
                        "workflow_id": row.workflow_id,
                        "total_traces": row.total_traces,
                        "first_trace_at": first_trace_at,
                        "last_trace_at": last_trace_at,
                    }
                )

            return stats_list, total_count

    except Exception as e:
        log_error(f"Error getting trace stats: {e}")
        return [], 0

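# Shape sketch of one returned stats row (all values hypothetical). Timestamps
# are stored as ISO strings and parsed back into datetimes, timezone-aware when
# the stored string carries a "Z" or an explicit offset:
#
#     {
#         "session_id": "sess_1", "user_id": "user_1", "agent_id": "agent_1",
#         "team_id": None, "workflow_id": None, "total_traces": 12,
#         "first_trace_at": datetime(2025, 1, 1, 9, 0, tzinfo=timezone.utc),
#         "last_trace_at": datetime(2025, 1, 1, 9, 45, tzinfo=timezone.utc),
#     }
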
# --- Spans ---
def create_span(self, span: "Span") -> None:
    """Create a single span in the database.

    Args:
        span: The Span object to store.
    """
    try:
        table = self._get_table(table_type="spans", create_table_if_not_found=True)
        if table is None:
            return

        with self.Session() as sess, sess.begin():
            stmt = mysql.insert(table).values(span.to_dict())
            sess.execute(stmt)

    except Exception as e:
        log_error(f"Error creating span: {e}")

def create_spans(self, spans: List) -> None:
    """Create multiple spans in the database as a batch.

    Args:
        spans: List of Span objects to store.
    """
    if not spans:
        return

    try:
        table = self._get_table(table_type="spans", create_table_if_not_found=True)
        if table is None:
            return

        with self.Session() as sess, sess.begin():
            # All inserts share one transaction, so the batch commits atomically
            for span in spans:
                stmt = mysql.insert(table).values(span.to_dict())
                sess.execute(stmt)

    except Exception as e:
        log_error(f"Error creating spans batch: {e}")

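# An equivalent batching sketch: assuming every span dict shares the same keys,
# the per-row loop above could be collapsed into one multi-row INSERT. Either
# way the writes are atomic, since they share a single sess.begin() transaction.
#
#     sess.execute(mysql.insert(table).values([s.to_dict() for s in spans]))
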
def get_span(self, span_id: str):
    """Get a single span by its span_id.

    Args:
        span_id: The unique span identifier.

    Returns:
        Optional[Span]: The span if found, None otherwise.
    """
    try:
        from agno.tracing.schemas import Span

        table = self._get_table(table_type="spans")
        if table is None:
            return None

        with self.Session() as sess:
            stmt = select(table).where(table.c.span_id == span_id)
            result = sess.execute(stmt).fetchone()
            if result:
                return Span.from_dict(dict(result._mapping))
            return None

    except Exception as e:
        log_error(f"Error getting span: {e}")
        return None

def get_spans(
    self,
    trace_id: Optional[str] = None,
    parent_span_id: Optional[str] = None,
    limit: Optional[int] = 1000,
) -> List:
    """Get spans matching the provided filters.

    Args:
        trace_id: Filter by trace ID.
        parent_span_id: Filter by parent span ID.
        limit: Maximum number of spans to return.

    Returns:
        List[Span]: List of matching spans.
    """
    try:
        from agno.tracing.schemas import Span

        table = self._get_table(table_type="spans")
        if table is None:
            return []

        with self.Session() as sess:
            stmt = select(table)

            # Apply filters
            if trace_id:
                stmt = stmt.where(table.c.trace_id == trace_id)
            if parent_span_id:
                stmt = stmt.where(table.c.parent_span_id == parent_span_id)

            if limit:
                stmt = stmt.limit(limit)

            results = sess.execute(stmt).fetchall()
            return [Span.from_dict(dict(row._mapping)) for row in results]

    except Exception as e:
        log_error(f"Error getting spans: {e}")
        return []