agno 2.0.0rc2__py3-none-any.whl → 2.3.0__py3-none-any.whl
This diff shows the changes between two publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- agno/agent/agent.py +6009 -2874
- agno/api/api.py +2 -0
- agno/api/os.py +1 -1
- agno/culture/__init__.py +3 -0
- agno/culture/manager.py +956 -0
- agno/db/async_postgres/__init__.py +3 -0
- agno/db/base.py +385 -6
- agno/db/dynamo/dynamo.py +388 -81
- agno/db/dynamo/schemas.py +47 -10
- agno/db/dynamo/utils.py +63 -4
- agno/db/firestore/firestore.py +435 -64
- agno/db/firestore/schemas.py +11 -0
- agno/db/firestore/utils.py +102 -4
- agno/db/gcs_json/gcs_json_db.py +384 -42
- agno/db/gcs_json/utils.py +60 -26
- agno/db/in_memory/in_memory_db.py +351 -66
- agno/db/in_memory/utils.py +60 -2
- agno/db/json/json_db.py +339 -48
- agno/db/json/utils.py +60 -26
- agno/db/migrations/manager.py +199 -0
- agno/db/migrations/v1_to_v2.py +510 -37
- agno/db/migrations/versions/__init__.py +0 -0
- agno/db/migrations/versions/v2_3_0.py +938 -0
- agno/db/mongo/__init__.py +15 -1
- agno/db/mongo/async_mongo.py +2036 -0
- agno/db/mongo/mongo.py +653 -76
- agno/db/mongo/schemas.py +13 -0
- agno/db/mongo/utils.py +80 -8
- agno/db/mysql/mysql.py +687 -25
- agno/db/mysql/schemas.py +61 -37
- agno/db/mysql/utils.py +60 -2
- agno/db/postgres/__init__.py +2 -1
- agno/db/postgres/async_postgres.py +2001 -0
- agno/db/postgres/postgres.py +676 -57
- agno/db/postgres/schemas.py +43 -18
- agno/db/postgres/utils.py +164 -2
- agno/db/redis/redis.py +344 -38
- agno/db/redis/schemas.py +18 -0
- agno/db/redis/utils.py +60 -2
- agno/db/schemas/__init__.py +2 -1
- agno/db/schemas/culture.py +120 -0
- agno/db/schemas/memory.py +13 -0
- agno/db/singlestore/schemas.py +26 -1
- agno/db/singlestore/singlestore.py +687 -53
- agno/db/singlestore/utils.py +60 -2
- agno/db/sqlite/__init__.py +2 -1
- agno/db/sqlite/async_sqlite.py +2371 -0
- agno/db/sqlite/schemas.py +24 -0
- agno/db/sqlite/sqlite.py +774 -85
- agno/db/sqlite/utils.py +168 -5
- agno/db/surrealdb/__init__.py +3 -0
- agno/db/surrealdb/metrics.py +292 -0
- agno/db/surrealdb/models.py +309 -0
- agno/db/surrealdb/queries.py +71 -0
- agno/db/surrealdb/surrealdb.py +1361 -0
- agno/db/surrealdb/utils.py +147 -0
- agno/db/utils.py +50 -22
- agno/eval/accuracy.py +50 -43
- agno/eval/performance.py +6 -3
- agno/eval/reliability.py +6 -3
- agno/eval/utils.py +33 -16
- agno/exceptions.py +68 -1
- agno/filters.py +354 -0
- agno/guardrails/__init__.py +6 -0
- agno/guardrails/base.py +19 -0
- agno/guardrails/openai.py +144 -0
- agno/guardrails/pii.py +94 -0
- agno/guardrails/prompt_injection.py +52 -0
- agno/integrations/discord/client.py +1 -0
- agno/knowledge/chunking/agentic.py +13 -10
- agno/knowledge/chunking/fixed.py +1 -1
- agno/knowledge/chunking/semantic.py +40 -8
- agno/knowledge/chunking/strategy.py +59 -15
- agno/knowledge/embedder/aws_bedrock.py +9 -4
- agno/knowledge/embedder/azure_openai.py +54 -0
- agno/knowledge/embedder/base.py +2 -0
- agno/knowledge/embedder/cohere.py +184 -5
- agno/knowledge/embedder/fastembed.py +1 -1
- agno/knowledge/embedder/google.py +79 -1
- agno/knowledge/embedder/huggingface.py +9 -4
- agno/knowledge/embedder/jina.py +63 -0
- agno/knowledge/embedder/mistral.py +78 -11
- agno/knowledge/embedder/nebius.py +1 -1
- agno/knowledge/embedder/ollama.py +13 -0
- agno/knowledge/embedder/openai.py +37 -65
- agno/knowledge/embedder/sentence_transformer.py +8 -4
- agno/knowledge/embedder/vllm.py +262 -0
- agno/knowledge/embedder/voyageai.py +69 -16
- agno/knowledge/knowledge.py +595 -187
- agno/knowledge/reader/base.py +9 -2
- agno/knowledge/reader/csv_reader.py +8 -10
- agno/knowledge/reader/docx_reader.py +5 -6
- agno/knowledge/reader/field_labeled_csv_reader.py +290 -0
- agno/knowledge/reader/json_reader.py +6 -5
- agno/knowledge/reader/markdown_reader.py +13 -13
- agno/knowledge/reader/pdf_reader.py +43 -68
- agno/knowledge/reader/pptx_reader.py +101 -0
- agno/knowledge/reader/reader_factory.py +51 -6
- agno/knowledge/reader/s3_reader.py +3 -15
- agno/knowledge/reader/tavily_reader.py +194 -0
- agno/knowledge/reader/text_reader.py +13 -13
- agno/knowledge/reader/web_search_reader.py +2 -43
- agno/knowledge/reader/website_reader.py +43 -25
- agno/knowledge/reranker/__init__.py +3 -0
- agno/knowledge/types.py +9 -0
- agno/knowledge/utils.py +20 -0
- agno/media.py +339 -266
- agno/memory/manager.py +336 -82
- agno/models/aimlapi/aimlapi.py +2 -2
- agno/models/anthropic/claude.py +183 -37
- agno/models/aws/bedrock.py +52 -112
- agno/models/aws/claude.py +33 -1
- agno/models/azure/ai_foundry.py +33 -15
- agno/models/azure/openai_chat.py +25 -8
- agno/models/base.py +1011 -566
- agno/models/cerebras/cerebras.py +19 -13
- agno/models/cerebras/cerebras_openai.py +8 -5
- agno/models/cohere/chat.py +27 -1
- agno/models/cometapi/__init__.py +5 -0
- agno/models/cometapi/cometapi.py +57 -0
- agno/models/dashscope/dashscope.py +1 -0
- agno/models/deepinfra/deepinfra.py +2 -2
- agno/models/deepseek/deepseek.py +2 -2
- agno/models/fireworks/fireworks.py +2 -2
- agno/models/google/gemini.py +110 -37
- agno/models/groq/groq.py +28 -11
- agno/models/huggingface/huggingface.py +2 -1
- agno/models/internlm/internlm.py +2 -2
- agno/models/langdb/langdb.py +4 -4
- agno/models/litellm/chat.py +18 -1
- agno/models/litellm/litellm_openai.py +2 -2
- agno/models/llama_cpp/__init__.py +5 -0
- agno/models/llama_cpp/llama_cpp.py +22 -0
- agno/models/message.py +143 -4
- agno/models/meta/llama.py +27 -10
- agno/models/meta/llama_openai.py +5 -17
- agno/models/nebius/nebius.py +6 -6
- agno/models/nexus/__init__.py +3 -0
- agno/models/nexus/nexus.py +22 -0
- agno/models/nvidia/nvidia.py +2 -2
- agno/models/ollama/chat.py +60 -6
- agno/models/openai/chat.py +102 -43
- agno/models/openai/responses.py +103 -106
- agno/models/openrouter/openrouter.py +41 -3
- agno/models/perplexity/perplexity.py +4 -5
- agno/models/portkey/portkey.py +3 -3
- agno/models/requesty/__init__.py +5 -0
- agno/models/requesty/requesty.py +52 -0
- agno/models/response.py +81 -5
- agno/models/sambanova/sambanova.py +2 -2
- agno/models/siliconflow/__init__.py +5 -0
- agno/models/siliconflow/siliconflow.py +25 -0
- agno/models/together/together.py +2 -2
- agno/models/utils.py +254 -8
- agno/models/vercel/v0.py +2 -2
- agno/models/vertexai/__init__.py +0 -0
- agno/models/vertexai/claude.py +96 -0
- agno/models/vllm/vllm.py +1 -0
- agno/models/xai/xai.py +3 -2
- agno/os/app.py +543 -175
- agno/os/auth.py +24 -14
- agno/os/config.py +1 -0
- agno/os/interfaces/__init__.py +1 -0
- agno/os/interfaces/a2a/__init__.py +3 -0
- agno/os/interfaces/a2a/a2a.py +42 -0
- agno/os/interfaces/a2a/router.py +250 -0
- agno/os/interfaces/a2a/utils.py +924 -0
- agno/os/interfaces/agui/agui.py +23 -7
- agno/os/interfaces/agui/router.py +27 -3
- agno/os/interfaces/agui/utils.py +242 -142
- agno/os/interfaces/base.py +6 -2
- agno/os/interfaces/slack/router.py +81 -23
- agno/os/interfaces/slack/slack.py +29 -14
- agno/os/interfaces/whatsapp/router.py +11 -4
- agno/os/interfaces/whatsapp/whatsapp.py +14 -7
- agno/os/mcp.py +111 -54
- agno/os/middleware/__init__.py +7 -0
- agno/os/middleware/jwt.py +233 -0
- agno/os/router.py +556 -139
- agno/os/routers/evals/evals.py +71 -34
- agno/os/routers/evals/schemas.py +31 -31
- agno/os/routers/evals/utils.py +6 -5
- agno/os/routers/health.py +31 -0
- agno/os/routers/home.py +52 -0
- agno/os/routers/knowledge/knowledge.py +185 -38
- agno/os/routers/knowledge/schemas.py +82 -22
- agno/os/routers/memory/memory.py +158 -53
- agno/os/routers/memory/schemas.py +20 -16
- agno/os/routers/metrics/metrics.py +20 -8
- agno/os/routers/metrics/schemas.py +16 -16
- agno/os/routers/session/session.py +499 -38
- agno/os/schema.py +308 -198
- agno/os/utils.py +401 -41
- agno/reasoning/anthropic.py +80 -0
- agno/reasoning/azure_ai_foundry.py +2 -2
- agno/reasoning/deepseek.py +2 -2
- agno/reasoning/default.py +3 -1
- agno/reasoning/gemini.py +73 -0
- agno/reasoning/groq.py +2 -2
- agno/reasoning/ollama.py +2 -2
- agno/reasoning/openai.py +7 -2
- agno/reasoning/vertexai.py +76 -0
- agno/run/__init__.py +6 -0
- agno/run/agent.py +266 -112
- agno/run/base.py +53 -24
- agno/run/team.py +252 -111
- agno/run/workflow.py +156 -45
- agno/session/agent.py +105 -89
- agno/session/summary.py +65 -25
- agno/session/team.py +176 -96
- agno/session/workflow.py +406 -40
- agno/team/team.py +3854 -1692
- agno/tools/brightdata.py +3 -3
- agno/tools/cartesia.py +3 -5
- agno/tools/dalle.py +9 -8
- agno/tools/decorator.py +4 -2
- agno/tools/desi_vocal.py +2 -2
- agno/tools/duckduckgo.py +15 -11
- agno/tools/e2b.py +20 -13
- agno/tools/eleven_labs.py +26 -28
- agno/tools/exa.py +21 -16
- agno/tools/fal.py +4 -4
- agno/tools/file.py +153 -23
- agno/tools/file_generation.py +350 -0
- agno/tools/firecrawl.py +4 -4
- agno/tools/function.py +257 -37
- agno/tools/giphy.py +2 -2
- agno/tools/gmail.py +238 -14
- agno/tools/google_drive.py +270 -0
- agno/tools/googlecalendar.py +36 -8
- agno/tools/googlesheets.py +20 -5
- agno/tools/jira.py +20 -0
- agno/tools/knowledge.py +3 -3
- agno/tools/lumalab.py +3 -3
- agno/tools/mcp/__init__.py +10 -0
- agno/tools/mcp/mcp.py +331 -0
- agno/tools/mcp/multi_mcp.py +347 -0
- agno/tools/mcp/params.py +24 -0
- agno/tools/mcp_toolbox.py +284 -0
- agno/tools/mem0.py +11 -17
- agno/tools/memori.py +1 -53
- agno/tools/memory.py +419 -0
- agno/tools/models/azure_openai.py +2 -2
- agno/tools/models/gemini.py +3 -3
- agno/tools/models/groq.py +3 -5
- agno/tools/models/nebius.py +7 -7
- agno/tools/models_labs.py +25 -15
- agno/tools/notion.py +204 -0
- agno/tools/openai.py +4 -9
- agno/tools/opencv.py +3 -3
- agno/tools/parallel.py +314 -0
- agno/tools/replicate.py +7 -7
- agno/tools/scrapegraph.py +58 -31
- agno/tools/searxng.py +2 -2
- agno/tools/serper.py +2 -2
- agno/tools/slack.py +18 -3
- agno/tools/spider.py +2 -2
- agno/tools/tavily.py +146 -0
- agno/tools/whatsapp.py +1 -1
- agno/tools/workflow.py +278 -0
- agno/tools/yfinance.py +12 -11
- agno/utils/agent.py +820 -0
- agno/utils/audio.py +27 -0
- agno/utils/common.py +90 -1
- agno/utils/events.py +222 -7
- agno/utils/gemini.py +181 -23
- agno/utils/hooks.py +57 -0
- agno/utils/http.py +111 -0
- agno/utils/knowledge.py +12 -5
- agno/utils/log.py +1 -0
- agno/utils/mcp.py +95 -5
- agno/utils/media.py +188 -10
- agno/utils/merge_dict.py +22 -1
- agno/utils/message.py +60 -0
- agno/utils/models/claude.py +40 -11
- agno/utils/models/cohere.py +1 -1
- agno/utils/models/watsonx.py +1 -1
- agno/utils/openai.py +1 -1
- agno/utils/print_response/agent.py +105 -21
- agno/utils/print_response/team.py +103 -38
- agno/utils/print_response/workflow.py +251 -34
- agno/utils/reasoning.py +22 -1
- agno/utils/serialize.py +32 -0
- agno/utils/streamlit.py +16 -10
- agno/utils/string.py +41 -0
- agno/utils/team.py +98 -9
- agno/utils/tools.py +1 -1
- agno/vectordb/base.py +23 -4
- agno/vectordb/cassandra/cassandra.py +65 -9
- agno/vectordb/chroma/chromadb.py +182 -38
- agno/vectordb/clickhouse/clickhousedb.py +64 -11
- agno/vectordb/couchbase/couchbase.py +105 -10
- agno/vectordb/lancedb/lance_db.py +183 -135
- agno/vectordb/langchaindb/langchaindb.py +25 -7
- agno/vectordb/lightrag/lightrag.py +17 -3
- agno/vectordb/llamaindex/__init__.py +3 -0
- agno/vectordb/llamaindex/llamaindexdb.py +46 -7
- agno/vectordb/milvus/milvus.py +126 -9
- agno/vectordb/mongodb/__init__.py +7 -1
- agno/vectordb/mongodb/mongodb.py +112 -7
- agno/vectordb/pgvector/pgvector.py +142 -21
- agno/vectordb/pineconedb/pineconedb.py +80 -8
- agno/vectordb/qdrant/qdrant.py +125 -39
- agno/vectordb/redis/__init__.py +9 -0
- agno/vectordb/redis/redisdb.py +694 -0
- agno/vectordb/singlestore/singlestore.py +111 -25
- agno/vectordb/surrealdb/surrealdb.py +31 -5
- agno/vectordb/upstashdb/upstashdb.py +76 -8
- agno/vectordb/weaviate/weaviate.py +86 -15
- agno/workflow/__init__.py +2 -0
- agno/workflow/agent.py +299 -0
- agno/workflow/condition.py +112 -18
- agno/workflow/loop.py +69 -10
- agno/workflow/parallel.py +266 -118
- agno/workflow/router.py +110 -17
- agno/workflow/step.py +645 -136
- agno/workflow/steps.py +65 -6
- agno/workflow/types.py +71 -33
- agno/workflow/workflow.py +2113 -300
- agno-2.3.0.dist-info/METADATA +618 -0
- agno-2.3.0.dist-info/RECORD +577 -0
- agno-2.3.0.dist-info/licenses/LICENSE +201 -0
- agno/knowledge/reader/url_reader.py +0 -128
- agno/tools/googlesearch.py +0 -98
- agno/tools/mcp.py +0 -610
- agno/utils/models/aws_claude.py +0 -170
- agno-2.0.0rc2.dist-info/METADATA +0 -355
- agno-2.0.0rc2.dist-info/RECORD +0 -515
- agno-2.0.0rc2.dist-info/licenses/LICENSE +0 -375
- {agno-2.0.0rc2.dist-info → agno-2.3.0.dist-info}/WHEEL +0 -0
- {agno-2.0.0rc2.dist-info → agno-2.3.0.dist-info}/top_level.txt +0 -0
agno/db/sqlite/sqlite.py
CHANGED
@@ -1,10 +1,12 @@
 import time
 from datetime import date, datetime, timedelta, timezone
 from pathlib import Path
-from typing import Any, Dict, List, Optional, Sequence, Tuple, Union
+from typing import Any, Dict, List, Optional, Sequence, Tuple, Union, cast
 from uuid import uuid4
 
 from agno.db.base import BaseDb, SessionType
+from agno.db.migrations.manager import MigrationManager
+from agno.db.schemas.culture import CulturalKnowledge
 from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
 from agno.db.schemas.knowledge import KnowledgeRow
 from agno.db.schemas.memory import UserMemory
@@ -13,17 +15,20 @@ from agno.db.sqlite.utils import (
     apply_sorting,
     bulk_upsert_metrics,
     calculate_date_metrics,
+    deserialize_cultural_knowledge_from_db,
     fetch_all_sessions_data,
     get_dates_to_calculate_metrics_for,
     is_table_available,
     is_valid_table,
+    serialize_cultural_knowledge_for_db,
 )
 from agno.db.utils import deserialize_session_json_fields, serialize_session_json_fields
 from agno.session import AgentSession, Session, TeamSession, WorkflowSession
 from agno.utils.log import log_debug, log_error, log_info, log_warning
+from agno.utils.string import generate_id
 
 try:
-    from sqlalchemy import Column, MetaData, Table, and_, func, select, text
+    from sqlalchemy import Column, MetaData, Table, and_, func, select, text
     from sqlalchemy.dialects import sqlite
     from sqlalchemy.engine import Engine, create_engine
     from sqlalchemy.orm import scoped_session, sessionmaker
@@ -35,14 +40,17 @@ except ImportError:
 class SqliteDb(BaseDb):
     def __init__(
         self,
+        db_file: Optional[str] = None,
         db_engine: Optional[Engine] = None,
         db_url: Optional[str] = None,
-        db_file: Optional[str] = None,
         session_table: Optional[str] = None,
+        culture_table: Optional[str] = None,
         memory_table: Optional[str] = None,
         metrics_table: Optional[str] = None,
         eval_table: Optional[str] = None,
         knowledge_table: Optional[str] = None,
+        versions_table: Optional[str] = None,
+        id: Optional[str] = None,
     ):
         """
         Interface for interacting with a SQLite database.
@@ -54,24 +62,34 @@ class SqliteDb(BaseDb):
         4. Create a new database in the current directory
 
         Args:
+            db_file (Optional[str]): The database file to connect to.
             db_engine (Optional[Engine]): The SQLAlchemy database engine to use.
             db_url (Optional[str]): The database URL to connect to.
-            db_file (Optional[str]): The database file to connect to.
             session_table (Optional[str]): Name of the table to store Agent, Team and Workflow sessions.
+            culture_table (Optional[str]): Name of the table to store cultural notions.
             memory_table (Optional[str]): Name of the table to store user memories.
             metrics_table (Optional[str]): Name of the table to store metrics.
            eval_table (Optional[str]): Name of the table to store evaluation runs data.
             knowledge_table (Optional[str]): Name of the table to store knowledge documents data.
+            versions_table (Optional[str]): Name of the table to store schema versions.
+            id (Optional[str]): ID of the database.
 
         Raises:
             ValueError: If none of the tables are provided.
         """
+        if id is None:
+            seed = db_url or db_file or str(db_engine.url) if db_engine else "sqlite:///agno.db"
+            id = generate_id(seed)
+
         super().__init__(
+            id=id,
             session_table=session_table,
+            culture_table=culture_table,
             memory_table=memory_table,
             metrics_table=metrics_table,
             eval_table=eval_table,
             knowledge_table=knowledge_table,
+            versions_table=versions_table,
         )
 
         _engine: Optional[Engine] = db_engine
@@ -99,6 +117,36 @@ class SqliteDb(BaseDb):
         self.Session: scoped_session = scoped_session(sessionmaker(bind=self.db_engine))
 
     # -- DB methods --
+    def table_exists(self, table_name: str) -> bool:
+        """Check if a table with the given name exists in the SQLite database.
+
+        Args:
+            table_name: Name of the table to check
+
+        Returns:
+            bool: True if the table exists in the database, False otherwise
+        """
+        with self.Session() as sess:
+            return is_table_available(session=sess, table_name=table_name)
+
+    def _create_all_tables(self):
+        """Create all tables for the database."""
+        tables_to_create = [
+            (self.session_table_name, "sessions"),
+            (self.memory_table_name, "memories"),
+            (self.metrics_table_name, "metrics"),
+            (self.eval_table_name, "evals"),
+            (self.knowledge_table_name, "knowledge"),
+            (self.versions_table_name, "versions"),
+        ]
+
+        for table_name, table_type in tables_to_create:
+            if table_name != self.versions_table_name:
+                # Also store the schema version for the created table
+                latest_schema_version = MigrationManager(self).latest_schema_version
+                self.upsert_schema_version(table_name=table_name, version=latest_schema_version.public)
+
+            self._create_table(table_name=table_name, table_type=table_type)
 
     def _create_table(self, table_name: str, table_type: str) -> Table:
         """
@@ -113,7 +161,7 @@ class SqliteDb(BaseDb):
         """
         try:
             table_schema = get_table_schema_definition(table_type)
-            log_debug(f"Creating table {table_name}
+            log_debug(f"Creating table {table_name}")
 
             columns: List[Column] = []
             indexes: List[str] = []
@@ -172,12 +220,12 @@ class SqliteDb(BaseDb):
                 except Exception as e:
                     log_warning(f"Error creating index {idx.name}: {e}")
 
-
+            log_debug(f"Successfully created table '{table_name}'")
             return table
 
         except Exception as e:
             log_error(f"Could not create table '{table_name}': {e}")
-            raise
+            raise e
 
     def _get_table(self, table_type: str, create_table_if_not_found: Optional[bool] = False) -> Optional[Table]:
         if table_type == "sessions":
@@ -221,11 +269,30 @@ class SqliteDb(BaseDb):
             )
             return self.knowledge_table
 
+        elif table_type == "culture":
+            self.culture_table = self._get_or_create_table(
+                table_name=self.culture_table_name,
+                table_type="culture",
+                create_table_if_not_found=create_table_if_not_found,
+            )
+            return self.culture_table
+
+        elif table_type == "versions":
+            self.versions_table = self._get_or_create_table(
+                table_name=self.versions_table_name,
+                table_type="versions",
+                create_table_if_not_found=create_table_if_not_found,
+            )
+            return self.versions_table
+
         else:
             raise ValueError(f"Unknown table type: '{table_type}'")
 
     def _get_or_create_table(
-        self,
+        self,
+        table_name: str,
+        table_type: str,
+        create_table_if_not_found: Optional[bool] = False,
     ) -> Optional[Table]:
         """
         Check if the table exists and is valid, else create it.
@@ -243,6 +310,12 @@ class SqliteDb(BaseDb):
         if not table_is_available:
             if not create_table_if_not_found:
                 return None
+
+            if table_name != self.versions_table_name:
+                # Also store the schema version for the created table
+                latest_schema_version = MigrationManager(self).latest_schema_version
+                self.upsert_schema_version(table_name=table_name, version=latest_schema_version.public)
+
             return self._create_table(table_name=table_name, table_type=table_type)
 
         # SQLite version of table validation (no schema)
@@ -256,7 +329,43 @@ class SqliteDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error loading existing table {table_name}: {e}")
-            raise
+            raise e
+
+    def get_latest_schema_version(self, table_name: str):
+        """Get the latest version of the database schema."""
+        table = self._get_table(table_type="versions", create_table_if_not_found=True)
+        if table is None:
+            return "2.0.0"
+        with self.Session() as sess:
+            stmt = select(table)
+            # Latest version for the given table
+            stmt = stmt.where(table.c.table_name == table_name)
+            stmt = stmt.order_by(table.c.version.desc()).limit(1)
+            result = sess.execute(stmt).fetchone()
+            if result is None:
+                return "2.0.0"
+            version_dict = dict(result._mapping)
+            return version_dict.get("version") or "2.0.0"
+
+    def upsert_schema_version(self, table_name: str, version: str) -> None:
+        """Upsert the schema version into the database."""
+        table = self._get_table(table_type="versions", create_table_if_not_found=True)
+        if table is None:
+            return
+        current_datetime = datetime.now().isoformat()
+        with self.Session() as sess, sess.begin():
+            stmt = sqlite.insert(table).values(
+                table_name=table_name,
+                version=version,
+                created_at=current_datetime,  # Store as ISO format string
+                updated_at=current_datetime,
+            )
+            # Update version if table_name already exists
+            stmt = stmt.on_conflict_do_update(
+                index_elements=["table_name"],
+                set_=dict(version=version, updated_at=current_datetime),
+            )
+            sess.execute(stmt)
 
     # -- Session methods --
 
@@ -287,7 +396,7 @@ class SqliteDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting session: {e}")
-
+            raise e
 
     def delete_sessions(self, session_ids: List[str]) -> None:
         """Delete all given sessions from the database.
@@ -312,6 +421,7 @@ class SqliteDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting sessions: {e}")
+            raise e
 
     def get_session(
         self,
@@ -325,8 +435,8 @@ class SqliteDb(BaseDb):
 
         Args:
             session_id (str): ID of the session to read.
+            session_type (SessionType): Type of session to get.
            user_id (Optional[str]): User ID to filter by. Defaults to None.
-            session_type (Optional[SessionType]): Type of session to read. Defaults to None.
             deserialize (Optional[bool]): Whether to serialize the session. Defaults to True.
 
         Returns:
@@ -348,8 +458,6 @@ class SqliteDb(BaseDb):
                 # Filtering
                 if user_id is not None:
                     stmt = stmt.where(table.c.user_id == user_id)
-                if session_type is not None:
-                    stmt = stmt.where(table.c.session_type == session_type)
 
                 result = sess.execute(stmt).fetchone()
                 if result is None:
@@ -370,7 +478,7 @@ class SqliteDb(BaseDb):
 
         except Exception as e:
             log_debug(f"Exception reading from sessions table: {e}")
-
+            raise e
 
     def get_sessions(
         self,
@@ -433,11 +541,7 @@ class SqliteDb(BaseDb):
                 if end_timestamp is not None:
                     stmt = stmt.where(table.c.created_at <= end_timestamp)
                 if session_name is not None:
-                    stmt = stmt.where(
-                        func.coalesce(func.json_extract(table.c.session_data, "$.session_name"), "").like(
-                            f"%{session_name}%"
-                        )
-                    )
+                    stmt = stmt.where(table.c.session_data.like(f"%{session_name}%"))
                 if session_type is not None:
                     stmt = stmt.where(table.c.session_type == session_type.value)
 
@@ -459,8 +563,10 @@ class SqliteDb(BaseDb):
                     return [] if deserialize else ([], 0)
 
                 sessions_raw = [deserialize_session_json_fields(dict(record._mapping)) for record in records]
-                if not
+                if not deserialize:
                     return sessions_raw, total_count
+                if not sessions_raw:
+                    return []
 
                 if session_type == SessionType.AGENT:
                     return [AgentSession.from_dict(record) for record in sessions_raw]  # type: ignore
@@ -473,10 +579,14 @@ class SqliteDb(BaseDb):
 
         except Exception as e:
             log_debug(f"Exception reading from sessions table: {e}")
-
+            raise e
 
     def rename_session(
-        self,
+        self,
+        session_id: str,
+        session_type: SessionType,
+        session_name: str,
+        deserialize: Optional[bool] = True,
     ) -> Optional[Union[Session, Dict[str, Any]]]:
         """
         Rename a session in the database.
@@ -496,47 +606,24 @@ class SqliteDb(BaseDb):
             Exception: If an error occurs during renaming.
         """
         try:
-
-
+            # Get the current session as a deserialized object
+            # Get the session record
+            session = self.get_session(session_id, session_type, deserialize=True)
+            if session is None:
                 return None
 
-
-
-
-
-
-                    .values(session_data=func.json_set(table.c.session_data, "$.session_name", session_name))
-                )
-                result = sess.execute(stmt)
-
-                # Check if any rows were affected
-                if result.rowcount == 0:
-                    return None
-
-                # Fetch the updated row
-                select_stmt = select(table).where(table.c.session_id == session_id)
-                row = sess.execute(select_stmt).fetchone()
-
-                if not row:
-                    return None
+            session = cast(Session, session)
+            # Update the session name
+            if session.session_data is None:
+                session.session_data = {}
+            session.session_data["session_name"] = session_name
 
-
-
-                    return session_raw
-
-                # Return the appropriate session type
-                if session_type == SessionType.AGENT:
-                    return AgentSession.from_dict(session_raw)
-                elif session_type == SessionType.TEAM:
-                    return TeamSession.from_dict(session_raw)
-                elif session_type == SessionType.WORKFLOW:
-                    return WorkflowSession.from_dict(session_raw)
-                else:
-                    raise ValueError(f"Invalid session type: {session_type}")
+            # Upsert the updated session back to the database
+            return self.upsert_session(session, deserialize=deserialize)
 
         except Exception as e:
             log_error(f"Exception renaming session: {e}")
-
+            raise e
 
     def upsert_session(
         self, session: Session, deserialize: Optional[bool] = True
@@ -677,13 +764,246 @@ class SqliteDb(BaseDb):
 
         except Exception as e:
             log_warning(f"Exception upserting into table: {e}")
-
+            raise e
+
+    def upsert_sessions(
+        self,
+        sessions: List[Session],
+        deserialize: Optional[bool] = True,
+        preserve_updated_at: bool = False,
+    ) -> List[Union[Session, Dict[str, Any]]]:
+        """
+        Bulk upsert multiple sessions for improved performance on large datasets.
+
+        Args:
+            sessions (List[Session]): List of sessions to upsert.
+            deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
+            preserve_updated_at (bool): If True, preserve the updated_at from the session object.
+
+        Returns:
+            List[Union[Session, Dict[str, Any]]]: List of upserted sessions.
+
+        Raises:
+            Exception: If an error occurs during bulk upsert.
+        """
+        if not sessions:
+            return []
+
+        try:
+            table = self._get_table(table_type="sessions", create_table_if_not_found=True)
+            if table is None:
+                log_info("Sessions table not available, falling back to individual upserts")
+                return [
+                    result
+                    for session in sessions
+                    if session is not None
+                    for result in [self.upsert_session(session, deserialize=deserialize)]
+                    if result is not None
+                ]
+
+            # Group sessions by type for batch processing
+            agent_sessions = []
+            team_sessions = []
+            workflow_sessions = []
+
+            for session in sessions:
+                if isinstance(session, AgentSession):
+                    agent_sessions.append(session)
+                elif isinstance(session, TeamSession):
+                    team_sessions.append(session)
+                elif isinstance(session, WorkflowSession):
+                    workflow_sessions.append(session)
+
+            results: List[Union[Session, Dict[str, Any]]] = []
+
+            with self.Session() as sess, sess.begin():
+                # Bulk upsert agent sessions
+                if agent_sessions:
+                    agent_data = []
+                    for session in agent_sessions:
+                        serialized_session = serialize_session_json_fields(session.to_dict())
+                        # Use preserved updated_at if flag is set and value exists, otherwise use current time
+                        updated_at = serialized_session.get("updated_at") if preserve_updated_at else int(time.time())
+                        agent_data.append(
+                            {
+                                "session_id": serialized_session.get("session_id"),
+                                "session_type": SessionType.AGENT.value,
+                                "agent_id": serialized_session.get("agent_id"),
+                                "user_id": serialized_session.get("user_id"),
+                                "agent_data": serialized_session.get("agent_data"),
+                                "session_data": serialized_session.get("session_data"),
+                                "metadata": serialized_session.get("metadata"),
+                                "runs": serialized_session.get("runs"),
+                                "summary": serialized_session.get("summary"),
+                                "created_at": serialized_session.get("created_at"),
+                                "updated_at": updated_at,
+                            }
+                        )
+
+                    if agent_data:
+                        stmt = sqlite.insert(table)
+                        stmt = stmt.on_conflict_do_update(
+                            index_elements=["session_id"],
+                            set_=dict(
+                                agent_id=stmt.excluded.agent_id,
+                                user_id=stmt.excluded.user_id,
+                                agent_data=stmt.excluded.agent_data,
+                                session_data=stmt.excluded.session_data,
+                                metadata=stmt.excluded.metadata,
+                                runs=stmt.excluded.runs,
+                                summary=stmt.excluded.summary,
+                                updated_at=stmt.excluded.updated_at,
+                            ),
+                        )
+                        sess.execute(stmt, agent_data)
+
+                    # Fetch the results for agent sessions
+                    agent_ids = [session.session_id for session in agent_sessions]
+                    select_stmt = select(table).where(table.c.session_id.in_(agent_ids))
+                    result = sess.execute(select_stmt).fetchall()
+
+                    for row in result:
+                        session_dict = deserialize_session_json_fields(dict(row._mapping))
+                        if deserialize:
+                            deserialized_agent_session = AgentSession.from_dict(session_dict)
+                            if deserialized_agent_session is None:
+                                continue
+                            results.append(deserialized_agent_session)
+                        else:
+                            results.append(session_dict)
+
+                # Bulk upsert team sessions
+                if team_sessions:
+                    team_data = []
+                    for session in team_sessions:
+                        serialized_session = serialize_session_json_fields(session.to_dict())
+                        # Use preserved updated_at if flag is set and value exists, otherwise use current time
+                        updated_at = serialized_session.get("updated_at") if preserve_updated_at else int(time.time())
+                        team_data.append(
+                            {
+                                "session_id": serialized_session.get("session_id"),
+                                "session_type": SessionType.TEAM.value,
+                                "team_id": serialized_session.get("team_id"),
+                                "user_id": serialized_session.get("user_id"),
+                                "runs": serialized_session.get("runs"),
+                                "summary": serialized_session.get("summary"),
+                                "created_at": serialized_session.get("created_at"),
+                                "updated_at": updated_at,
+                                "team_data": serialized_session.get("team_data"),
+                                "session_data": serialized_session.get("session_data"),
+                                "metadata": serialized_session.get("metadata"),
+                            }
+                        )
+
+                    if team_data:
+                        stmt = sqlite.insert(table)
+                        stmt = stmt.on_conflict_do_update(
+                            index_elements=["session_id"],
+                            set_=dict(
+                                team_id=stmt.excluded.team_id,
+                                user_id=stmt.excluded.user_id,
+                                team_data=stmt.excluded.team_data,
+                                session_data=stmt.excluded.session_data,
+                                metadata=stmt.excluded.metadata,
+                                runs=stmt.excluded.runs,
+                                summary=stmt.excluded.summary,
+                                updated_at=stmt.excluded.updated_at,
+                            ),
+                        )
+                        sess.execute(stmt, team_data)
+
+                    # Fetch the results for team sessions
+                    team_ids = [session.session_id for session in team_sessions]
+                    select_stmt = select(table).where(table.c.session_id.in_(team_ids))
+                    result = sess.execute(select_stmt).fetchall()
+
+                    for row in result:
+                        session_dict = deserialize_session_json_fields(dict(row._mapping))
+                        if deserialize:
+                            deserialized_team_session = TeamSession.from_dict(session_dict)
+                            if deserialized_team_session is None:
+                                continue
+                            results.append(deserialized_team_session)
+                        else:
+                            results.append(session_dict)
+
+                # Bulk upsert workflow sessions
+                if workflow_sessions:
+                    workflow_data = []
+                    for session in workflow_sessions:
+                        serialized_session = serialize_session_json_fields(session.to_dict())
+                        # Use preserved updated_at if flag is set and value exists, otherwise use current time
+                        updated_at = serialized_session.get("updated_at") if preserve_updated_at else int(time.time())
+                        workflow_data.append(
+                            {
+                                "session_id": serialized_session.get("session_id"),
+                                "session_type": SessionType.WORKFLOW.value,
+                                "workflow_id": serialized_session.get("workflow_id"),
+                                "user_id": serialized_session.get("user_id"),
+                                "runs": serialized_session.get("runs"),
+                                "summary": serialized_session.get("summary"),
+                                "created_at": serialized_session.get("created_at"),
+                                "updated_at": updated_at,
+                                "workflow_data": serialized_session.get("workflow_data"),
+                                "session_data": serialized_session.get("session_data"),
+                                "metadata": serialized_session.get("metadata"),
+                            }
+                        )
+
+                    if workflow_data:
+                        stmt = sqlite.insert(table)
+                        stmt = stmt.on_conflict_do_update(
+                            index_elements=["session_id"],
+                            set_=dict(
+                                workflow_id=stmt.excluded.workflow_id,
+                                user_id=stmt.excluded.user_id,
+                                workflow_data=stmt.excluded.workflow_data,
+                                session_data=stmt.excluded.session_data,
+                                metadata=stmt.excluded.metadata,
+                                runs=stmt.excluded.runs,
+                                summary=stmt.excluded.summary,
+                                updated_at=stmt.excluded.updated_at,
+                            ),
+                        )
+                        sess.execute(stmt, workflow_data)
+
+                    # Fetch the results for workflow sessions
+                    workflow_ids = [session.session_id for session in workflow_sessions]
+                    select_stmt = select(table).where(table.c.session_id.in_(workflow_ids))
+                    result = sess.execute(select_stmt).fetchall()
+
+                    for row in result:
+                        session_dict = deserialize_session_json_fields(dict(row._mapping))
+                        if deserialize:
+                            deserialized_workflow_session = WorkflowSession.from_dict(session_dict)
+                            if deserialized_workflow_session is None:
+                                continue
+                            results.append(deserialized_workflow_session)
+                        else:
+                            results.append(session_dict)
+
+            return results
+
+        except Exception as e:
+            log_error(f"Exception during bulk session upsert, falling back to individual upserts: {e}")
+            # Fallback to individual upserts
+            return [
+                result
+                for session in sessions
+                if session is not None
+                for result in [self.upsert_session(session, deserialize=deserialize)]
+                if result is not None
+            ]
 
     # -- Memory methods --
 
-    def delete_user_memory(self, memory_id: str):
+    def delete_user_memory(self, memory_id: str, user_id: Optional[str] = None):
         """Delete a user memory from the database.
 
+        Args:
+            memory_id (str): The ID of the memory to delete.
+            user_id (Optional[str]): The user ID to filter by. Defaults to None.
+
         Returns:
             bool: True if deletion was successful, False otherwise.
 
@@ -697,6 +1017,8 @@ class SqliteDb(BaseDb):
 
             with self.Session() as sess, sess.begin():
                 delete_stmt = table.delete().where(table.c.memory_id == memory_id)
+                if user_id is not None:
+                    delete_stmt = delete_stmt.where(table.c.user_id == user_id)
                 result = sess.execute(delete_stmt)
 
                 success = result.rowcount > 0
@@ -707,12 +1029,14 @@ class SqliteDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting user memory: {e}")
+            raise e
 
-    def delete_user_memories(self, memory_ids: List[str]) -> None:
+    def delete_user_memories(self, memory_ids: List[str], user_id: Optional[str] = None) -> None:
         """Delete user memories from the database.
 
         Args:
             memory_ids (List[str]): The IDs of the memories to delete.
+            user_id (Optional[str]): The user ID to filter by. Defaults to None.
 
         Raises:
             Exception: If an error occurs during deletion.
@@ -724,12 +1048,15 @@ class SqliteDb(BaseDb):
 
             with self.Session() as sess, sess.begin():
                 delete_stmt = table.delete().where(table.c.memory_id.in_(memory_ids))
+                if user_id is not None:
+                    delete_stmt = delete_stmt.where(table.c.user_id == user_id)
                 result = sess.execute(delete_stmt)
                 if result.rowcount == 0:
                     log_debug(f"No user memories found with ids: {memory_ids}")
 
         except Exception as e:
             log_error(f"Error deleting user memories: {e}")
+            raise e
 
     def get_all_memory_topics(self) -> List[str]:
         """Get all memory topics from the database.
@@ -743,23 +1070,28 @@ class SqliteDb(BaseDb):
                 return []
 
             with self.Session() as sess, sess.begin():
-
+                # Select topics from all results
+                stmt = select(func.json_array_elements_text(table.c.topics)).select_from(table)
                 result = sess.execute(stmt).fetchall()
 
                 return list(set([record[0] for record in result]))
 
         except Exception as e:
             log_debug(f"Exception reading from memory table: {e}")
-
+            raise e
 
     def get_user_memory(
-        self,
+        self,
+        memory_id: str,
+        deserialize: Optional[bool] = True,
+        user_id: Optional[str] = None,
     ) -> Optional[Union[UserMemory, Dict[str, Any]]]:
         """Get a memory from the database.
 
         Args:
             memory_id (str): The ID of the memory to get.
             deserialize (Optional[bool]): Whether to serialize the memory. Defaults to True.
+            user_id (Optional[str]): The user ID to filter by. Defaults to None.
 
         Returns:
             Optional[Union[UserMemory, Dict[str, Any]]]:
@@ -776,6 +1108,8 @@ class SqliteDb(BaseDb):
 
             with self.Session() as sess, sess.begin():
                 stmt = select(table).where(table.c.memory_id == memory_id)
+                if user_id is not None:
+                    stmt = stmt.where(table.c.user_id == user_id)
                 result = sess.execute(stmt).fetchone()
                 if result is None:
                     return None
@@ -788,7 +1122,7 @@ class SqliteDb(BaseDb):
 
         except Exception as e:
             log_debug(f"Exception reading from memorytable: {e}")
-
+            raise e
 
     def get_user_memories(
         self,
@@ -872,7 +1206,7 @@ class SqliteDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error reading from memory table: {e}")
-
+            raise e
 
     def get_user_memory_stats(
         self,
@@ -941,7 +1275,7 @@ class SqliteDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error getting user memory stats: {e}")
-
+            raise e
 
     def upsert_user_memory(
         self, memory: UserMemory, deserialize: Optional[bool] = True
@@ -968,6 +1302,8 @@ class SqliteDb(BaseDb):
             if memory.memory_id is None:
                 memory.memory_id = str(uuid4())
 
+            current_time = int(time.time())
+
             with self.Session() as sess, sess.begin():
                 stmt = sqlite.insert(table).values(
                     user_id=memory.user_id,
@@ -977,7 +1313,9 @@ class SqliteDb(BaseDb):
                     memory=memory.memory,
                     topics=memory.topics,
                     input=memory.input,
-
+                    feedback=memory.feedback,
+                    created_at=memory.created_at,
+                    updated_at=memory.created_at,
                 )
                 stmt = stmt.on_conflict_do_update(  # type: ignore
                     index_elements=["memory_id"],
@@ -985,7 +1323,12 @@ class SqliteDb(BaseDb):
                         memory=memory.memory,
                         topics=memory.topics,
                         input=memory.input,
-
+                        agent_id=memory.agent_id,
+                        team_id=memory.team_id,
+                        feedback=memory.feedback,
+                        updated_at=current_time,
+                        # Preserve created_at on update - don't overwrite existing value
+                        created_at=table.c.created_at,
                     ),
                 ).returning(table)
 
@@ -1003,7 +1346,113 @@ class SqliteDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error upserting user memory: {e}")
-
+            raise e
+
+    def upsert_memories(
+        self,
+        memories: List[UserMemory],
+        deserialize: Optional[bool] = True,
+        preserve_updated_at: bool = False,
+    ) -> List[Union[UserMemory, Dict[str, Any]]]:
+        """
+        Bulk upsert multiple user memories for improved performance on large datasets.
+
+        Args:
+            memories (List[UserMemory]): List of memories to upsert.
+            deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.
+
+        Returns:
+            List[Union[UserMemory, Dict[str, Any]]]: List of upserted memories.
+
+        Raises:
+            Exception: If an error occurs during bulk upsert.
+        """
+        if not memories:
+            return []
+
+        try:
+            table = self._get_table(table_type="memories", create_table_if_not_found=True)
+            if table is None:
+                log_info("Memories table not available, falling back to individual upserts")
+                return [
+                    result
+                    for memory in memories
+                    if memory is not None
+                    for result in [self.upsert_user_memory(memory, deserialize=deserialize)]
+                    if result is not None
+                ]
+            # Prepare bulk data
+            bulk_data = []
+            current_time = int(time.time())
+
+            for memory in memories:
+                if memory.memory_id is None:
+                    memory.memory_id = str(uuid4())
+
+                # Use preserved updated_at if flag is set and value exists, otherwise use current time
+                updated_at = memory.updated_at if preserve_updated_at else current_time
+
+                bulk_data.append(
+                    {
+                        "user_id": memory.user_id,
+                        "agent_id": memory.agent_id,
+                        "team_id": memory.team_id,
+                        "memory_id": memory.memory_id,
+                        "memory": memory.memory,
+                        "topics": memory.topics,
+                        "input": memory.input,
+                        "feedback": memory.feedback,
+                        "created_at": memory.created_at,
+                        "updated_at": updated_at,
+                    }
+                )
+
+            results: List[Union[UserMemory, Dict[str, Any]]] = []
+
+            with self.Session() as sess, sess.begin():
+                # Bulk upsert memories using SQLite ON CONFLICT DO UPDATE
+                stmt = sqlite.insert(table)
+                stmt = stmt.on_conflict_do_update(
+                    index_elements=["memory_id"],
+                    set_=dict(
+                        memory=stmt.excluded.memory,
+                        topics=stmt.excluded.topics,
+                        input=stmt.excluded.input,
+                        agent_id=stmt.excluded.agent_id,
+                        team_id=stmt.excluded.team_id,
+                        feedback=stmt.excluded.feedback,
+                        updated_at=stmt.excluded.updated_at,
+                        # Preserve created_at on update
+                        created_at=table.c.created_at,
+                    ),
+                )
+                sess.execute(stmt, bulk_data)
+
+                # Fetch results
+                memory_ids = [memory.memory_id for memory in memories if memory.memory_id]
+                select_stmt = select(table).where(table.c.memory_id.in_(memory_ids))
+                result = sess.execute(select_stmt).fetchall()
+
+                for row in result:
+                    memory_dict = dict(row._mapping)
+                    if deserialize:
+                        results.append(UserMemory.from_dict(memory_dict))
+                    else:
+                        results.append(memory_dict)
+
+            return results
+
+        except Exception as e:
+            log_error(f"Exception during bulk memory upsert, falling back to individual upserts: {e}")
+
+            # Fallback to individual upserts
+            return [
+                result
+                for memory in memories
+                if memory is not None
+                for result in [self.upsert_user_memory(memory, deserialize=deserialize)]
+                if result is not None
+            ]
 
     def clear_memories(self) -> None:
         """Delete all memories from the database.
@@ -1023,6 +1472,7 @@ class SqliteDb(BaseDb):
             from agno.utils.log import log_warning
 
             log_warning(f"Exception deleting all memories: {e}")
+            raise e
 
     # -- Metrics methods --
 
@@ -1066,7 +1516,7 @@ class SqliteDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error reading from sessions table: {e}")
-
+            raise e
 
     def _get_metrics_calculation_starting_date(self, table: Table) -> Optional[date]:
         """Get the first date for which metrics calculation is needed:
@@ -1139,7 +1589,9 @@ class SqliteDb(BaseDb):
                 start_timestamp=start_timestamp, end_timestamp=end_timestamp
             )
             all_sessions_data = fetch_all_sessions_data(
-                sessions=sessions,
+                sessions=sessions,
+                dates_to_process=dates_to_process,
+                start_timestamp=start_timestamp,
             )
             if not all_sessions_data:
                 log_info("No new session data found. Won't calculate metrics.")
@@ -1211,7 +1663,7 @@ class SqliteDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error getting metrics: {e}")
-
+            raise e
 
     # -- Knowledge methods --
 
@@ -1235,6 +1687,7 @@ class SqliteDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting knowledge content: {e}")
+            raise e
 
     def get_knowledge_content(self, id: str) -> Optional[KnowledgeRow]:
         """Get a knowledge row from the database.
@@ -1263,7 +1716,7 @@ class SqliteDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error getting knowledge content: {e}")
-
+            raise e
 
     def get_knowledge_contents(
         self,
@@ -1313,7 +1766,7 @@ class SqliteDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error getting knowledge contents: {e}")
-
+            raise e
 
     def upsert_knowledge_content(self, knowledge_row: KnowledgeRow):
         """Upsert knowledge content in the database.
@@ -1341,6 +1794,7 @@ class SqliteDb(BaseDb):
                 "linked_to": knowledge_row.linked_to,
                 "access_count": knowledge_row.access_count,
                 "status": knowledge_row.status,
+                "status_message": knowledge_row.status_message,
                 "created_at": knowledge_row.created_at,
                 "updated_at": knowledge_row.updated_at,
                 "external_id": knowledge_row.external_id,
@@ -1360,7 +1814,7 @@ class SqliteDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error upserting knowledge content: {e}")
-
+            raise e
 
     # -- Eval methods --
 
@@ -1384,7 +1838,11 @@ class SqliteDb(BaseDb):
             with self.Session() as sess, sess.begin():
                 current_time = int(time.time())
                 stmt = sqlite.insert(table).values(
-                    {
+                    {
+                        "created_at": current_time,
+                        "updated_at": current_time,
+                        **eval_run.model_dump(),
+                    }
                 )
                 sess.execute(stmt)
                 sess.commit()
@@ -1395,7 +1853,7 @@ class SqliteDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error creating eval run: {e}")
-
+            raise e
 
     def delete_eval_run(self, eval_run_id: str) -> None:
         """Delete an eval run from the database.
@@ -1418,7 +1876,7 @@ class SqliteDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting eval run {eval_run_id}: {e}")
-            raise
+            raise e
 
     def delete_eval_runs(self, eval_run_ids: List[str]) -> None:
         """Delete multiple eval runs from the database.
@@ -1441,7 +1899,7 @@ class SqliteDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error deleting eval runs {eval_run_ids}: {e}")
-            raise
+            raise e
 
     def get_eval_run(
         self, eval_run_id: str, deserialize: Optional[bool] = True
@@ -1479,7 +1937,7 @@ class SqliteDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception getting eval run {eval_run_id}: {e}")
-
+            raise e
 
     def get_eval_runs(
         self,
@@ -1573,7 +2031,7 @@ class SqliteDb(BaseDb):
 
         except Exception as e:
             log_error(f"Exception getting eval runs: {e}")
-
+            raise e
 
     def rename_eval_run(
         self, eval_run_id: str, name: str, deserialize: Optional[bool] = True
@@ -1615,7 +2073,7 @@ class SqliteDb(BaseDb):
 
         except Exception as e:
             log_error(f"Error renaming eval run {eval_run_id}: {e}")
-            raise
+            raise e
 
     # -- Migrations --
 
@@ -1658,19 +2116,250 @@ class SqliteDb(BaseDb):
         if v1_table_type == "agent_sessions":
             for session in sessions:
                 self.upsert_session(session)
-            log_info(f"Migrated {len(sessions)} Agent sessions to table: {self.
+            log_info(f"Migrated {len(sessions)} Agent sessions to table: {self.session_table_name}")
 
         elif v1_table_type == "team_sessions":
             for session in sessions:
                 self.upsert_session(session)
-            log_info(f"Migrated {len(sessions)} Team sessions to table: {self.
+            log_info(f"Migrated {len(sessions)} Team sessions to table: {self.session_table_name}")
 
         elif v1_table_type == "workflow_sessions":
             for session in sessions:
                 self.upsert_session(session)
-            log_info(f"Migrated {len(sessions)} Workflow sessions to table: {self.
+            log_info(f"Migrated {len(sessions)} Workflow sessions to table: {self.session_table_name}")
 
         elif v1_table_type == "memories":
             for memory in memories:
                 self.upsert_user_memory(memory)
             log_info(f"Migrated {len(memories)} memories to table: {self.memory_table}")
+
+    # -- Culture methods --
+
+    def clear_cultural_knowledge(self) -> None:
+        """Delete all cultural artifacts from the database.
+
+        Raises:
+            Exception: If an error occurs during deletion.
+        """
+        try:
+            table = self._get_table(table_type="culture")
+            if table is None:
+                return
+
+            with self.Session() as sess, sess.begin():
+                sess.execute(table.delete())
+
+        except Exception as e:
+            from agno.utils.log import log_warning
+
+            log_warning(f"Exception deleting all cultural artifacts: {e}")
+            raise e
+
+    def delete_cultural_knowledge(self, id: str) -> None:
+        """Delete a cultural artifact from the database.
+
+        Args:
+            id (str): The ID of the cultural artifact to delete.
+
+        Raises:
+            Exception: If an error occurs during deletion.
+        """
+        try:
+            table = self._get_table(table_type="culture")
+            if table is None:
+                return
+
+            with self.Session() as sess, sess.begin():
+                delete_stmt = table.delete().where(table.c.id == id)
+                result = sess.execute(delete_stmt)
+
+                success = result.rowcount > 0
+                if success:
+                    log_debug(f"Successfully deleted cultural artifact id: {id}")
+                else:
+                    log_debug(f"No cultural artifact found with id: {id}")
+
+        except Exception as e:
+            log_error(f"Error deleting cultural artifact: {e}")
+            raise e
+
+    def get_cultural_knowledge(
+        self, id: str, deserialize: Optional[bool] = True
+    ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
+        """Get a cultural artifact from the database.
+
+        Args:
+            id (str): The ID of the cultural artifact to get.
+            deserialize (Optional[bool]): Whether to serialize the cultural artifact. Defaults to True.
+
+        Returns:
+            Optional[CulturalKnowledge]: The cultural artifact, or None if it doesn't exist.
+
+        Raises:
+            Exception: If an error occurs during retrieval.
+        """
+        try:
+            table = self._get_table(table_type="culture")
+            if table is None:
+                return None
+
+            with self.Session() as sess, sess.begin():
+                stmt = select(table).where(table.c.id == id)
+                result = sess.execute(stmt).fetchone()
+                if result is None:
+                    return None
+
+                db_row = dict(result._mapping)
+                if not db_row or not deserialize:
+                    return db_row
+
|
|
2216
|
+
return deserialize_cultural_knowledge_from_db(db_row)
|
|
2217
|
+
|
|
2218
|
+
except Exception as e:
|
|
2219
|
+
log_error(f"Exception reading from cultural artifacts table: {e}")
|
|
2220
|
+
raise e
|
|
2221
|
+
|
|
2222
|
+
def get_all_cultural_knowledge(
|
|
2223
|
+
self,
|
|
2224
|
+
name: Optional[str] = None,
|
|
2225
|
+
agent_id: Optional[str] = None,
|
|
2226
|
+
team_id: Optional[str] = None,
|
|
2227
|
+
limit: Optional[int] = None,
|
|
2228
|
+
page: Optional[int] = None,
|
|
2229
|
+
sort_by: Optional[str] = None,
|
|
2230
|
+
sort_order: Optional[str] = None,
|
|
2231
|
+
deserialize: Optional[bool] = True,
|
|
2232
|
+
) -> Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
|
|
2233
|
+
"""Get all cultural artifacts from the database as CulturalNotion objects.
|
|
2234
|
+
|
|
2235
|
+
Args:
|
|
2236
|
+
name (Optional[str]): The name of the cultural artifact to filter by.
|
|
2237
|
+
agent_id (Optional[str]): The ID of the agent to filter by.
|
|
2238
|
+
team_id (Optional[str]): The ID of the team to filter by.
|
|
2239
|
+
limit (Optional[int]): The maximum number of cultural artifacts to return.
|
|
2240
|
+
page (Optional[int]): The page number.
|
|
2241
|
+
sort_by (Optional[str]): The column to sort by.
|
|
2242
|
+
sort_order (Optional[str]): The order to sort by.
|
|
2243
|
+
deserialize (Optional[bool]): Whether to serialize the cultural artifacts. Defaults to True.
|
|
2244
|
+
|
|
2245
|
+
Returns:
|
|
2246
|
+
Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
|
|
2247
|
+
- When deserialize=True: List of CulturalNotion objects
|
|
2248
|
+
- When deserialize=False: List of CulturalNotion dictionaries and total count
|
|
2249
|
+
|
|
2250
|
+
Raises:
|
|
2251
|
+
Exception: If an error occurs during retrieval.
|
|
2252
|
+
"""
|
|
2253
|
+
try:
|
|
2254
|
+
table = self._get_table(table_type="culture")
|
|
2255
|
+
if table is None:
|
|
2256
|
+
return [] if deserialize else ([], 0)
|
|
2257
|
+
|
|
2258
|
+
with self.Session() as sess, sess.begin():
|
|
2259
|
+
stmt = select(table)
|
|
2260
|
+
|
|
2261
|
+
# Filtering
|
|
2262
|
+
if name is not None:
|
|
2263
|
+
stmt = stmt.where(table.c.name == name)
|
|
2264
|
+
if agent_id is not None:
|
|
2265
|
+
stmt = stmt.where(table.c.agent_id == agent_id)
|
|
2266
|
+
if team_id is not None:
|
|
2267
|
+
stmt = stmt.where(table.c.team_id == team_id)
|
|
2268
|
+
|
|
2269
|
+
# Get total count after applying filtering
|
|
2270
|
+
count_stmt = select(func.count()).select_from(stmt.alias())
|
|
2271
|
+
total_count = sess.execute(count_stmt).scalar()
|
|
2272
|
+
|
|
2273
|
+
# Sorting
|
|
2274
|
+
stmt = apply_sorting(stmt, table, sort_by, sort_order)
|
|
2275
|
+
# Paginating
|
|
2276
|
+
if limit is not None:
|
|
2277
|
+
stmt = stmt.limit(limit)
|
|
2278
|
+
if page is not None:
|
|
2279
|
+
stmt = stmt.offset((page - 1) * limit)
|
|
2280
|
+
|
|
2281
|
+
result = sess.execute(stmt).fetchall()
|
|
2282
|
+
if not result:
|
|
2283
|
+
return [] if deserialize else ([], 0)
|
|
2284
|
+
|
|
2285
|
+
db_rows = [dict(record._mapping) for record in result]
|
|
2286
|
+
|
|
2287
|
+
if not deserialize:
|
|
2288
|
+
return db_rows, total_count
|
|
2289
|
+
|
|
2290
|
+
return [deserialize_cultural_knowledge_from_db(row) for row in db_rows]
|
|
2291
|
+
|
|
2292
|
+
except Exception as e:
|
|
2293
|
+
log_error(f"Error reading from cultural artifacts table: {e}")
|
|
2294
|
+
raise e
|
|
2295
|
+
|
|
2296
|
+
def upsert_cultural_knowledge(
|
|
2297
|
+
self, cultural_knowledge: CulturalKnowledge, deserialize: Optional[bool] = True
|
|
2298
|
+
) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
|
|
2299
|
+
"""Upsert a cultural artifact into the database.
|
|
2300
|
+
|
|
2301
|
+
Args:
|
|
2302
|
+
cultural_knowledge (CulturalKnowledge): The cultural artifact to upsert.
|
|
2303
|
+
deserialize (Optional[bool]): Whether to serialize the cultural artifact. Defaults to True.
|
|
2304
|
+
|
|
2305
|
+
Returns:
|
|
2306
|
+
Optional[Union[CulturalNotion, Dict[str, Any]]]:
|
|
2307
|
+
- When deserialize=True: CulturalNotion object
|
|
2308
|
+
- When deserialize=False: CulturalNotion dictionary
|
|
2309
|
+
|
|
2310
|
+
Raises:
|
|
2311
|
+
Exception: If an error occurs during upsert.
|
|
2312
|
+
"""
|
|
2313
|
+
try:
|
|
2314
|
+
table = self._get_table(table_type="culture", create_table_if_not_found=True)
|
|
2315
|
+
if table is None:
|
|
2316
|
+
return None
|
|
2317
|
+
|
|
2318
|
+
if cultural_knowledge.id is None:
|
|
2319
|
+
cultural_knowledge.id = str(uuid4())
|
|
2320
|
+
|
|
2321
|
+
# Serialize content, categories, and notes into a JSON string for DB storage (SQLite requires strings)
|
|
2322
|
+
content_json_str = serialize_cultural_knowledge_for_db(cultural_knowledge)
|
|
2323
|
+
|
|
2324
|
+
with self.Session() as sess, sess.begin():
|
|
2325
|
+
stmt = sqlite.insert(table).values(
|
|
2326
|
+
id=cultural_knowledge.id,
|
|
2327
|
+
name=cultural_knowledge.name,
|
|
2328
|
+
summary=cultural_knowledge.summary,
|
|
2329
|
+
content=content_json_str,
|
|
2330
|
+
metadata=cultural_knowledge.metadata,
|
|
2331
|
+
input=cultural_knowledge.input,
|
|
2332
|
+
created_at=cultural_knowledge.created_at,
|
|
2333
|
+
updated_at=int(time.time()),
|
|
2334
|
+
agent_id=cultural_knowledge.agent_id,
|
|
2335
|
+
team_id=cultural_knowledge.team_id,
|
|
2336
|
+
)
|
|
2337
|
+
stmt = stmt.on_conflict_do_update( # type: ignore
|
|
2338
|
+
index_elements=["id"],
|
|
2339
|
+
set_=dict(
|
|
2340
|
+
name=cultural_knowledge.name,
|
|
2341
|
+
summary=cultural_knowledge.summary,
|
|
2342
|
+
content=content_json_str,
|
|
2343
|
+
metadata=cultural_knowledge.metadata,
|
|
2344
|
+
input=cultural_knowledge.input,
|
|
2345
|
+
updated_at=int(time.time()),
|
|
2346
|
+
agent_id=cultural_knowledge.agent_id,
|
|
2347
|
+
team_id=cultural_knowledge.team_id,
|
|
2348
|
+
),
|
|
2349
|
+
).returning(table)
|
|
2350
|
+
|
|
2351
|
+
result = sess.execute(stmt)
|
|
2352
|
+
row = result.fetchone()
|
|
2353
|
+
|
|
2354
|
+
if row is None:
|
|
2355
|
+
return None
|
|
2356
|
+
|
|
2357
|
+
db_row: Dict[str, Any] = dict(row._mapping)
|
|
2358
|
+
if not db_row or not deserialize:
|
|
2359
|
+
return db_row
|
|
2360
|
+
|
|
2361
|
+
return deserialize_cultural_knowledge_from_db(db_row)
|
|
2362
|
+
|
|
2363
|
+
except Exception as e:
|
|
2364
|
+
log_error(f"Error upserting cultural knowledge: {e}")
|
|
2365
|
+
raise e
|
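Taken together, the new culture methods give `SqliteDb` a complete CRUD surface for `CulturalKnowledge`, with the upsert implemented as SQLite `INSERT ... ON CONFLICT(id) DO UPDATE ... RETURNING`. A usage sketch under two assumptions: that `CulturalKnowledge` (from the new `agno/db/schemas/culture.py` in the file list) is keyword-constructible from the fields the upsert writes, and the `SqliteDb` constructor argument as before:

```python
from agno.db.schemas.culture import CulturalKnowledge  # module per the file list; constructor assumed
from agno.db.sqlite import SqliteDb

db = SqliteDb(db_file="tmp/agno.db")  # db_file argument assumed for illustration

# First call inserts (an id is generated if missing); a later call with the
# same id hits ON CONFLICT DO UPDATE and overwrites the mutable fields.
saved = db.upsert_cultural_knowledge(
    CulturalKnowledge(name="greeting-style", summary="Prefer concise greetings")
)

# Filtered, paginated listing; deserialize=False returns (rows, total_count).
rows, total = db.get_all_cultural_knowledge(name="greeting-style", limit=10, page=1, deserialize=False)

# Point lookup and deletion by id.
artifact = db.get_cultural_knowledge(id=saved.id)
db.delete_cultural_knowledge(id=saved.id)
```

Note a design detail visible in the diff: the conflict-update `set_` refreshes `updated_at` on every write but omits `created_at`, so the original creation timestamp survives subsequent upserts.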