agno-0.1.2-py3-none-any.whl → agno-2.3.13-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/__init__.py +8 -0
- agno/agent/__init__.py +44 -5
- agno/agent/agent.py +10531 -2975
- agno/api/agent.py +14 -53
- agno/api/api.py +7 -46
- agno/api/evals.py +22 -0
- agno/api/os.py +17 -0
- agno/api/routes.py +6 -25
- agno/api/schemas/__init__.py +9 -0
- agno/api/schemas/agent.py +6 -9
- agno/api/schemas/evals.py +16 -0
- agno/api/schemas/os.py +14 -0
- agno/api/schemas/team.py +10 -10
- agno/api/schemas/utils.py +21 -0
- agno/api/schemas/workflows.py +16 -0
- agno/api/settings.py +53 -0
- agno/api/team.py +22 -26
- agno/api/workflow.py +28 -0
- agno/cloud/aws/base.py +214 -0
- agno/cloud/aws/s3/__init__.py +2 -0
- agno/cloud/aws/s3/api_client.py +43 -0
- agno/cloud/aws/s3/bucket.py +195 -0
- agno/cloud/aws/s3/object.py +57 -0
- agno/compression/__init__.py +3 -0
- agno/compression/manager.py +247 -0
- agno/culture/__init__.py +3 -0
- agno/culture/manager.py +956 -0
- agno/db/__init__.py +24 -0
- agno/db/async_postgres/__init__.py +3 -0
- agno/db/base.py +946 -0
- agno/db/dynamo/__init__.py +3 -0
- agno/db/dynamo/dynamo.py +2781 -0
- agno/db/dynamo/schemas.py +442 -0
- agno/db/dynamo/utils.py +743 -0
- agno/db/firestore/__init__.py +3 -0
- agno/db/firestore/firestore.py +2379 -0
- agno/db/firestore/schemas.py +181 -0
- agno/db/firestore/utils.py +376 -0
- agno/db/gcs_json/__init__.py +3 -0
- agno/db/gcs_json/gcs_json_db.py +1791 -0
- agno/db/gcs_json/utils.py +228 -0
- agno/db/in_memory/__init__.py +3 -0
- agno/db/in_memory/in_memory_db.py +1312 -0
- agno/db/in_memory/utils.py +230 -0
- agno/db/json/__init__.py +3 -0
- agno/db/json/json_db.py +1777 -0
- agno/db/json/utils.py +230 -0
- agno/db/migrations/manager.py +199 -0
- agno/db/migrations/v1_to_v2.py +635 -0
- agno/db/migrations/versions/v2_3_0.py +938 -0
- agno/db/mongo/__init__.py +17 -0
- agno/db/mongo/async_mongo.py +2760 -0
- agno/db/mongo/mongo.py +2597 -0
- agno/db/mongo/schemas.py +119 -0
- agno/db/mongo/utils.py +276 -0
- agno/db/mysql/__init__.py +4 -0
- agno/db/mysql/async_mysql.py +2912 -0
- agno/db/mysql/mysql.py +2923 -0
- agno/db/mysql/schemas.py +186 -0
- agno/db/mysql/utils.py +488 -0
- agno/db/postgres/__init__.py +4 -0
- agno/db/postgres/async_postgres.py +2579 -0
- agno/db/postgres/postgres.py +2870 -0
- agno/db/postgres/schemas.py +187 -0
- agno/db/postgres/utils.py +442 -0
- agno/db/redis/__init__.py +3 -0
- agno/db/redis/redis.py +2141 -0
- agno/db/redis/schemas.py +159 -0
- agno/db/redis/utils.py +346 -0
- agno/db/schemas/__init__.py +4 -0
- agno/db/schemas/culture.py +120 -0
- agno/db/schemas/evals.py +34 -0
- agno/db/schemas/knowledge.py +40 -0
- agno/db/schemas/memory.py +61 -0
- agno/db/singlestore/__init__.py +3 -0
- agno/db/singlestore/schemas.py +179 -0
- agno/db/singlestore/singlestore.py +2877 -0
- agno/db/singlestore/utils.py +384 -0
- agno/db/sqlite/__init__.py +4 -0
- agno/db/sqlite/async_sqlite.py +2911 -0
- agno/db/sqlite/schemas.py +181 -0
- agno/db/sqlite/sqlite.py +2908 -0
- agno/db/sqlite/utils.py +429 -0
- agno/db/surrealdb/__init__.py +3 -0
- agno/db/surrealdb/metrics.py +292 -0
- agno/db/surrealdb/models.py +334 -0
- agno/db/surrealdb/queries.py +71 -0
- agno/db/surrealdb/surrealdb.py +1908 -0
- agno/db/surrealdb/utils.py +147 -0
- agno/db/utils.py +118 -0
- agno/eval/__init__.py +24 -0
- agno/eval/accuracy.py +666 -276
- agno/eval/agent_as_judge.py +861 -0
- agno/eval/base.py +29 -0
- agno/eval/performance.py +779 -0
- agno/eval/reliability.py +241 -62
- agno/eval/utils.py +120 -0
- agno/exceptions.py +143 -1
- agno/filters.py +354 -0
- agno/guardrails/__init__.py +6 -0
- agno/guardrails/base.py +19 -0
- agno/guardrails/openai.py +144 -0
- agno/guardrails/pii.py +94 -0
- agno/guardrails/prompt_injection.py +52 -0
- agno/hooks/__init__.py +3 -0
- agno/hooks/decorator.py +164 -0
- agno/integrations/discord/__init__.py +3 -0
- agno/integrations/discord/client.py +203 -0
- agno/knowledge/__init__.py +5 -1
- agno/{document → knowledge}/chunking/agentic.py +22 -14
- agno/{document → knowledge}/chunking/document.py +2 -2
- agno/{document → knowledge}/chunking/fixed.py +7 -6
- agno/knowledge/chunking/markdown.py +151 -0
- agno/{document → knowledge}/chunking/recursive.py +15 -3
- agno/knowledge/chunking/row.py +39 -0
- agno/knowledge/chunking/semantic.py +91 -0
- agno/knowledge/chunking/strategy.py +165 -0
- agno/knowledge/content.py +74 -0
- agno/knowledge/document/__init__.py +5 -0
- agno/{document → knowledge/document}/base.py +12 -2
- agno/knowledge/embedder/__init__.py +5 -0
- agno/knowledge/embedder/aws_bedrock.py +343 -0
- agno/knowledge/embedder/azure_openai.py +210 -0
- agno/{embedder → knowledge/embedder}/base.py +8 -0
- agno/knowledge/embedder/cohere.py +323 -0
- agno/knowledge/embedder/fastembed.py +62 -0
- agno/{embedder → knowledge/embedder}/fireworks.py +1 -1
- agno/knowledge/embedder/google.py +258 -0
- agno/knowledge/embedder/huggingface.py +94 -0
- agno/knowledge/embedder/jina.py +182 -0
- agno/knowledge/embedder/langdb.py +22 -0
- agno/knowledge/embedder/mistral.py +206 -0
- agno/knowledge/embedder/nebius.py +13 -0
- agno/knowledge/embedder/ollama.py +154 -0
- agno/knowledge/embedder/openai.py +195 -0
- agno/knowledge/embedder/sentence_transformer.py +63 -0
- agno/{embedder → knowledge/embedder}/together.py +1 -1
- agno/knowledge/embedder/vllm.py +262 -0
- agno/knowledge/embedder/voyageai.py +165 -0
- agno/knowledge/knowledge.py +3006 -0
- agno/knowledge/reader/__init__.py +7 -0
- agno/knowledge/reader/arxiv_reader.py +81 -0
- agno/knowledge/reader/base.py +95 -0
- agno/knowledge/reader/csv_reader.py +164 -0
- agno/knowledge/reader/docx_reader.py +82 -0
- agno/knowledge/reader/field_labeled_csv_reader.py +290 -0
- agno/knowledge/reader/firecrawl_reader.py +201 -0
- agno/knowledge/reader/json_reader.py +88 -0
- agno/knowledge/reader/markdown_reader.py +137 -0
- agno/knowledge/reader/pdf_reader.py +431 -0
- agno/knowledge/reader/pptx_reader.py +101 -0
- agno/knowledge/reader/reader_factory.py +313 -0
- agno/knowledge/reader/s3_reader.py +89 -0
- agno/knowledge/reader/tavily_reader.py +193 -0
- agno/knowledge/reader/text_reader.py +127 -0
- agno/knowledge/reader/web_search_reader.py +325 -0
- agno/knowledge/reader/website_reader.py +455 -0
- agno/knowledge/reader/wikipedia_reader.py +91 -0
- agno/knowledge/reader/youtube_reader.py +78 -0
- agno/knowledge/remote_content/remote_content.py +88 -0
- agno/knowledge/reranker/__init__.py +3 -0
- agno/{reranker → knowledge/reranker}/base.py +1 -1
- agno/{reranker → knowledge/reranker}/cohere.py +2 -2
- agno/knowledge/reranker/infinity.py +195 -0
- agno/knowledge/reranker/sentence_transformer.py +54 -0
- agno/knowledge/types.py +39 -0
- agno/knowledge/utils.py +234 -0
- agno/media.py +439 -95
- agno/memory/__init__.py +16 -3
- agno/memory/manager.py +1474 -123
- agno/memory/strategies/__init__.py +15 -0
- agno/memory/strategies/base.py +66 -0
- agno/memory/strategies/summarize.py +196 -0
- agno/memory/strategies/types.py +37 -0
- agno/models/aimlapi/__init__.py +5 -0
- agno/models/aimlapi/aimlapi.py +62 -0
- agno/models/anthropic/__init__.py +4 -0
- agno/models/anthropic/claude.py +960 -496
- agno/models/aws/__init__.py +15 -0
- agno/models/aws/bedrock.py +686 -451
- agno/models/aws/claude.py +190 -183
- agno/models/azure/__init__.py +18 -1
- agno/models/azure/ai_foundry.py +489 -0
- agno/models/azure/openai_chat.py +89 -40
- agno/models/base.py +2477 -550
- agno/models/cerebras/__init__.py +12 -0
- agno/models/cerebras/cerebras.py +565 -0
- agno/models/cerebras/cerebras_openai.py +131 -0
- agno/models/cohere/__init__.py +4 -0
- agno/models/cohere/chat.py +306 -492
- agno/models/cometapi/__init__.py +5 -0
- agno/models/cometapi/cometapi.py +74 -0
- agno/models/dashscope/__init__.py +5 -0
- agno/models/dashscope/dashscope.py +90 -0
- agno/models/deepinfra/__init__.py +5 -0
- agno/models/deepinfra/deepinfra.py +45 -0
- agno/models/deepseek/__init__.py +4 -0
- agno/models/deepseek/deepseek.py +110 -9
- agno/models/fireworks/__init__.py +4 -0
- agno/models/fireworks/fireworks.py +19 -22
- agno/models/google/__init__.py +3 -7
- agno/models/google/gemini.py +1717 -662
- agno/models/google/utils.py +22 -0
- agno/models/groq/__init__.py +4 -0
- agno/models/groq/groq.py +391 -666
- agno/models/huggingface/__init__.py +4 -0
- agno/models/huggingface/huggingface.py +266 -538
- agno/models/ibm/__init__.py +5 -0
- agno/models/ibm/watsonx.py +432 -0
- agno/models/internlm/__init__.py +3 -0
- agno/models/internlm/internlm.py +20 -3
- agno/models/langdb/__init__.py +1 -0
- agno/models/langdb/langdb.py +60 -0
- agno/models/litellm/__init__.py +14 -0
- agno/models/litellm/chat.py +503 -0
- agno/models/litellm/litellm_openai.py +42 -0
- agno/models/llama_cpp/__init__.py +5 -0
- agno/models/llama_cpp/llama_cpp.py +22 -0
- agno/models/lmstudio/__init__.py +5 -0
- agno/models/lmstudio/lmstudio.py +25 -0
- agno/models/message.py +361 -39
- agno/models/meta/__init__.py +12 -0
- agno/models/meta/llama.py +502 -0
- agno/models/meta/llama_openai.py +79 -0
- agno/models/metrics.py +120 -0
- agno/models/mistral/__init__.py +4 -0
- agno/models/mistral/mistral.py +293 -393
- agno/models/nebius/__init__.py +3 -0
- agno/models/nebius/nebius.py +53 -0
- agno/models/nexus/__init__.py +3 -0
- agno/models/nexus/nexus.py +22 -0
- agno/models/nvidia/__init__.py +4 -0
- agno/models/nvidia/nvidia.py +22 -3
- agno/models/ollama/__init__.py +4 -2
- agno/models/ollama/chat.py +257 -492
- agno/models/openai/__init__.py +7 -0
- agno/models/openai/chat.py +725 -770
- agno/models/openai/like.py +16 -2
- agno/models/openai/responses.py +1121 -0
- agno/models/openrouter/__init__.py +4 -0
- agno/models/openrouter/openrouter.py +62 -5
- agno/models/perplexity/__init__.py +5 -0
- agno/models/perplexity/perplexity.py +203 -0
- agno/models/portkey/__init__.py +3 -0
- agno/models/portkey/portkey.py +82 -0
- agno/models/requesty/__init__.py +5 -0
- agno/models/requesty/requesty.py +69 -0
- agno/models/response.py +177 -7
- agno/models/sambanova/__init__.py +4 -0
- agno/models/sambanova/sambanova.py +23 -4
- agno/models/siliconflow/__init__.py +5 -0
- agno/models/siliconflow/siliconflow.py +42 -0
- agno/models/together/__init__.py +4 -0
- agno/models/together/together.py +21 -164
- agno/models/utils.py +266 -0
- agno/models/vercel/__init__.py +3 -0
- agno/models/vercel/v0.py +43 -0
- agno/models/vertexai/__init__.py +0 -1
- agno/models/vertexai/claude.py +190 -0
- agno/models/vllm/__init__.py +3 -0
- agno/models/vllm/vllm.py +83 -0
- agno/models/xai/__init__.py +2 -0
- agno/models/xai/xai.py +111 -7
- agno/os/__init__.py +3 -0
- agno/os/app.py +1027 -0
- agno/os/auth.py +244 -0
- agno/os/config.py +126 -0
- agno/os/interfaces/__init__.py +1 -0
- agno/os/interfaces/a2a/__init__.py +3 -0
- agno/os/interfaces/a2a/a2a.py +42 -0
- agno/os/interfaces/a2a/router.py +249 -0
- agno/os/interfaces/a2a/utils.py +924 -0
- agno/os/interfaces/agui/__init__.py +3 -0
- agno/os/interfaces/agui/agui.py +47 -0
- agno/os/interfaces/agui/router.py +147 -0
- agno/os/interfaces/agui/utils.py +574 -0
- agno/os/interfaces/base.py +25 -0
- agno/os/interfaces/slack/__init__.py +3 -0
- agno/os/interfaces/slack/router.py +148 -0
- agno/os/interfaces/slack/security.py +30 -0
- agno/os/interfaces/slack/slack.py +47 -0
- agno/os/interfaces/whatsapp/__init__.py +3 -0
- agno/os/interfaces/whatsapp/router.py +210 -0
- agno/os/interfaces/whatsapp/security.py +55 -0
- agno/os/interfaces/whatsapp/whatsapp.py +36 -0
- agno/os/mcp.py +293 -0
- agno/os/middleware/__init__.py +9 -0
- agno/os/middleware/jwt.py +797 -0
- agno/os/router.py +258 -0
- agno/os/routers/__init__.py +3 -0
- agno/os/routers/agents/__init__.py +3 -0
- agno/os/routers/agents/router.py +599 -0
- agno/os/routers/agents/schema.py +261 -0
- agno/os/routers/evals/__init__.py +3 -0
- agno/os/routers/evals/evals.py +450 -0
- agno/os/routers/evals/schemas.py +174 -0
- agno/os/routers/evals/utils.py +231 -0
- agno/os/routers/health.py +31 -0
- agno/os/routers/home.py +52 -0
- agno/os/routers/knowledge/__init__.py +3 -0
- agno/os/routers/knowledge/knowledge.py +1008 -0
- agno/os/routers/knowledge/schemas.py +178 -0
- agno/os/routers/memory/__init__.py +3 -0
- agno/os/routers/memory/memory.py +661 -0
- agno/os/routers/memory/schemas.py +88 -0
- agno/os/routers/metrics/__init__.py +3 -0
- agno/os/routers/metrics/metrics.py +190 -0
- agno/os/routers/metrics/schemas.py +47 -0
- agno/os/routers/session/__init__.py +3 -0
- agno/os/routers/session/session.py +997 -0
- agno/os/routers/teams/__init__.py +3 -0
- agno/os/routers/teams/router.py +512 -0
- agno/os/routers/teams/schema.py +257 -0
- agno/os/routers/traces/__init__.py +3 -0
- agno/os/routers/traces/schemas.py +414 -0
- agno/os/routers/traces/traces.py +499 -0
- agno/os/routers/workflows/__init__.py +3 -0
- agno/os/routers/workflows/router.py +624 -0
- agno/os/routers/workflows/schema.py +75 -0
- agno/os/schema.py +534 -0
- agno/os/scopes.py +469 -0
- agno/{playground → os}/settings.py +7 -15
- agno/os/utils.py +973 -0
- agno/reasoning/anthropic.py +80 -0
- agno/reasoning/azure_ai_foundry.py +67 -0
- agno/reasoning/deepseek.py +63 -0
- agno/reasoning/default.py +97 -0
- agno/reasoning/gemini.py +73 -0
- agno/reasoning/groq.py +71 -0
- agno/reasoning/helpers.py +24 -1
- agno/reasoning/ollama.py +67 -0
- agno/reasoning/openai.py +86 -0
- agno/reasoning/step.py +2 -1
- agno/reasoning/vertexai.py +76 -0
- agno/run/__init__.py +6 -0
- agno/run/agent.py +822 -0
- agno/run/base.py +247 -0
- agno/run/cancel.py +81 -0
- agno/run/requirement.py +181 -0
- agno/run/team.py +767 -0
- agno/run/workflow.py +708 -0
- agno/session/__init__.py +10 -0
- agno/session/agent.py +260 -0
- agno/session/summary.py +265 -0
- agno/session/team.py +342 -0
- agno/session/workflow.py +501 -0
- agno/table.py +10 -0
- agno/team/__init__.py +37 -0
- agno/team/team.py +9536 -0
- agno/tools/__init__.py +7 -0
- agno/tools/agentql.py +120 -0
- agno/tools/airflow.py +22 -12
- agno/tools/api.py +122 -0
- agno/tools/apify.py +276 -83
- agno/tools/{arxiv_toolkit.py → arxiv.py} +20 -12
- agno/tools/aws_lambda.py +28 -7
- agno/tools/aws_ses.py +66 -0
- agno/tools/baidusearch.py +11 -4
- agno/tools/bitbucket.py +292 -0
- agno/tools/brandfetch.py +213 -0
- agno/tools/bravesearch.py +106 -0
- agno/tools/brightdata.py +367 -0
- agno/tools/browserbase.py +209 -0
- agno/tools/calcom.py +32 -23
- agno/tools/calculator.py +24 -37
- agno/tools/cartesia.py +187 -0
- agno/tools/{clickup_tool.py → clickup.py} +17 -28
- agno/tools/confluence.py +91 -26
- agno/tools/crawl4ai.py +139 -43
- agno/tools/csv_toolkit.py +28 -22
- agno/tools/dalle.py +36 -22
- agno/tools/daytona.py +475 -0
- agno/tools/decorator.py +169 -14
- agno/tools/desi_vocal.py +23 -11
- agno/tools/discord.py +32 -29
- agno/tools/docker.py +716 -0
- agno/tools/duckdb.py +76 -81
- agno/tools/duckduckgo.py +43 -40
- agno/tools/e2b.py +703 -0
- agno/tools/eleven_labs.py +65 -54
- agno/tools/email.py +13 -5
- agno/tools/evm.py +129 -0
- agno/tools/exa.py +324 -42
- agno/tools/fal.py +39 -35
- agno/tools/file.py +196 -30
- agno/tools/file_generation.py +356 -0
- agno/tools/financial_datasets.py +288 -0
- agno/tools/firecrawl.py +108 -33
- agno/tools/function.py +960 -122
- agno/tools/giphy.py +34 -12
- agno/tools/github.py +1294 -97
- agno/tools/gmail.py +922 -0
- agno/tools/google_bigquery.py +117 -0
- agno/tools/google_drive.py +271 -0
- agno/tools/google_maps.py +253 -0
- agno/tools/googlecalendar.py +607 -107
- agno/tools/googlesheets.py +377 -0
- agno/tools/hackernews.py +20 -12
- agno/tools/jina.py +24 -14
- agno/tools/jira.py +48 -19
- agno/tools/knowledge.py +218 -0
- agno/tools/linear.py +82 -43
- agno/tools/linkup.py +58 -0
- agno/tools/local_file_system.py +15 -7
- agno/tools/lumalab.py +41 -26
- agno/tools/mcp/__init__.py +10 -0
- agno/tools/mcp/mcp.py +331 -0
- agno/tools/mcp/multi_mcp.py +347 -0
- agno/tools/mcp/params.py +24 -0
- agno/tools/mcp_toolbox.py +284 -0
- agno/tools/mem0.py +193 -0
- agno/tools/memory.py +419 -0
- agno/tools/mlx_transcribe.py +11 -9
- agno/tools/models/azure_openai.py +190 -0
- agno/tools/models/gemini.py +203 -0
- agno/tools/models/groq.py +158 -0
- agno/tools/models/morph.py +186 -0
- agno/tools/models/nebius.py +124 -0
- agno/tools/models_labs.py +163 -82
- agno/tools/moviepy_video.py +18 -13
- agno/tools/nano_banana.py +151 -0
- agno/tools/neo4j.py +134 -0
- agno/tools/newspaper.py +15 -4
- agno/tools/newspaper4k.py +19 -6
- agno/tools/notion.py +204 -0
- agno/tools/openai.py +181 -17
- agno/tools/openbb.py +27 -20
- agno/tools/opencv.py +321 -0
- agno/tools/openweather.py +233 -0
- agno/tools/oxylabs.py +385 -0
- agno/tools/pandas.py +25 -15
- agno/tools/parallel.py +314 -0
- agno/tools/postgres.py +238 -185
- agno/tools/pubmed.py +125 -13
- agno/tools/python.py +48 -35
- agno/tools/reasoning.py +283 -0
- agno/tools/reddit.py +207 -29
- agno/tools/redshift.py +406 -0
- agno/tools/replicate.py +69 -26
- agno/tools/resend.py +11 -6
- agno/tools/scrapegraph.py +179 -19
- agno/tools/searxng.py +23 -31
- agno/tools/serpapi.py +15 -10
- agno/tools/serper.py +255 -0
- agno/tools/shell.py +23 -12
- agno/tools/shopify.py +1519 -0
- agno/tools/slack.py +56 -14
- agno/tools/sleep.py +8 -6
- agno/tools/spider.py +35 -11
- agno/tools/spotify.py +919 -0
- agno/tools/sql.py +34 -19
- agno/tools/tavily.py +158 -8
- agno/tools/telegram.py +18 -8
- agno/tools/todoist.py +218 -0
- agno/tools/toolkit.py +134 -9
- agno/tools/trafilatura.py +388 -0
- agno/tools/trello.py +25 -28
- agno/tools/twilio.py +18 -9
- agno/tools/user_control_flow.py +78 -0
- agno/tools/valyu.py +228 -0
- agno/tools/visualization.py +467 -0
- agno/tools/webbrowser.py +28 -0
- agno/tools/webex.py +76 -0
- agno/tools/website.py +23 -19
- agno/tools/webtools.py +45 -0
- agno/tools/whatsapp.py +286 -0
- agno/tools/wikipedia.py +28 -19
- agno/tools/workflow.py +285 -0
- agno/tools/{twitter.py → x.py} +142 -46
- agno/tools/yfinance.py +41 -39
- agno/tools/youtube.py +34 -17
- agno/tools/zendesk.py +15 -5
- agno/tools/zep.py +454 -0
- agno/tools/zoom.py +86 -37
- agno/tracing/__init__.py +12 -0
- agno/tracing/exporter.py +157 -0
- agno/tracing/schemas.py +276 -0
- agno/tracing/setup.py +111 -0
- agno/utils/agent.py +938 -0
- agno/utils/audio.py +37 -1
- agno/utils/certs.py +27 -0
- agno/utils/code_execution.py +11 -0
- agno/utils/common.py +103 -20
- agno/utils/cryptography.py +22 -0
- agno/utils/dttm.py +33 -0
- agno/utils/events.py +700 -0
- agno/utils/functions.py +107 -37
- agno/utils/gemini.py +426 -0
- agno/utils/hooks.py +171 -0
- agno/utils/http.py +185 -0
- agno/utils/json_schema.py +159 -37
- agno/utils/knowledge.py +36 -0
- agno/utils/location.py +19 -0
- agno/utils/log.py +221 -8
- agno/utils/mcp.py +214 -0
- agno/utils/media.py +335 -14
- agno/utils/merge_dict.py +22 -1
- agno/utils/message.py +77 -2
- agno/utils/models/ai_foundry.py +50 -0
- agno/utils/models/claude.py +373 -0
- agno/utils/models/cohere.py +94 -0
- agno/utils/models/llama.py +85 -0
- agno/utils/models/mistral.py +100 -0
- agno/utils/models/openai_responses.py +140 -0
- agno/utils/models/schema_utils.py +153 -0
- agno/utils/models/watsonx.py +41 -0
- agno/utils/openai.py +257 -0
- agno/utils/pickle.py +1 -1
- agno/utils/pprint.py +124 -8
- agno/utils/print_response/agent.py +930 -0
- agno/utils/print_response/team.py +1914 -0
- agno/utils/print_response/workflow.py +1668 -0
- agno/utils/prompts.py +111 -0
- agno/utils/reasoning.py +108 -0
- agno/utils/response.py +163 -0
- agno/utils/serialize.py +32 -0
- agno/utils/shell.py +4 -4
- agno/utils/streamlit.py +487 -0
- agno/utils/string.py +204 -51
- agno/utils/team.py +139 -0
- agno/utils/timer.py +9 -2
- agno/utils/tokens.py +657 -0
- agno/utils/tools.py +19 -1
- agno/utils/whatsapp.py +305 -0
- agno/utils/yaml_io.py +3 -3
- agno/vectordb/__init__.py +2 -0
- agno/vectordb/base.py +87 -9
- agno/vectordb/cassandra/__init__.py +5 -1
- agno/vectordb/cassandra/cassandra.py +383 -27
- agno/vectordb/chroma/__init__.py +4 -0
- agno/vectordb/chroma/chromadb.py +748 -83
- agno/vectordb/clickhouse/__init__.py +7 -1
- agno/vectordb/clickhouse/clickhousedb.py +554 -53
- agno/vectordb/couchbase/__init__.py +3 -0
- agno/vectordb/couchbase/couchbase.py +1446 -0
- agno/vectordb/lancedb/__init__.py +5 -0
- agno/vectordb/lancedb/lance_db.py +730 -98
- agno/vectordb/langchaindb/__init__.py +5 -0
- agno/vectordb/langchaindb/langchaindb.py +163 -0
- agno/vectordb/lightrag/__init__.py +5 -0
- agno/vectordb/lightrag/lightrag.py +388 -0
- agno/vectordb/llamaindex/__init__.py +3 -0
- agno/vectordb/llamaindex/llamaindexdb.py +166 -0
- agno/vectordb/milvus/__init__.py +3 -0
- agno/vectordb/milvus/milvus.py +966 -78
- agno/vectordb/mongodb/__init__.py +9 -1
- agno/vectordb/mongodb/mongodb.py +1175 -172
- agno/vectordb/pgvector/__init__.py +8 -0
- agno/vectordb/pgvector/pgvector.py +599 -115
- agno/vectordb/pineconedb/__init__.py +5 -1
- agno/vectordb/pineconedb/pineconedb.py +406 -43
- agno/vectordb/qdrant/__init__.py +4 -0
- agno/vectordb/qdrant/qdrant.py +914 -61
- agno/vectordb/redis/__init__.py +9 -0
- agno/vectordb/redis/redisdb.py +682 -0
- agno/vectordb/singlestore/__init__.py +8 -1
- agno/vectordb/singlestore/singlestore.py +771 -0
- agno/vectordb/surrealdb/__init__.py +3 -0
- agno/vectordb/surrealdb/surrealdb.py +663 -0
- agno/vectordb/upstashdb/__init__.py +5 -0
- agno/vectordb/upstashdb/upstashdb.py +718 -0
- agno/vectordb/weaviate/__init__.py +8 -0
- agno/vectordb/weaviate/index.py +15 -0
- agno/vectordb/weaviate/weaviate.py +1009 -0
- agno/workflow/__init__.py +23 -1
- agno/workflow/agent.py +299 -0
- agno/workflow/condition.py +759 -0
- agno/workflow/loop.py +756 -0
- agno/workflow/parallel.py +853 -0
- agno/workflow/router.py +723 -0
- agno/workflow/step.py +1564 -0
- agno/workflow/steps.py +613 -0
- agno/workflow/types.py +556 -0
- agno/workflow/workflow.py +4327 -514
- agno-2.3.13.dist-info/METADATA +639 -0
- agno-2.3.13.dist-info/RECORD +613 -0
- {agno-0.1.2.dist-info → agno-2.3.13.dist-info}/WHEEL +1 -1
- agno-2.3.13.dist-info/licenses/LICENSE +201 -0
- agno/api/playground.py +0 -91
- agno/api/schemas/playground.py +0 -22
- agno/api/schemas/user.py +0 -22
- agno/api/schemas/workspace.py +0 -46
- agno/api/user.py +0 -160
- agno/api/workspace.py +0 -151
- agno/cli/auth_server.py +0 -118
- agno/cli/config.py +0 -275
- agno/cli/console.py +0 -88
- agno/cli/credentials.py +0 -23
- agno/cli/entrypoint.py +0 -571
- agno/cli/operator.py +0 -355
- agno/cli/settings.py +0 -85
- agno/cli/ws/ws_cli.py +0 -817
- agno/constants.py +0 -13
- agno/document/__init__.py +0 -1
- agno/document/chunking/semantic.py +0 -47
- agno/document/chunking/strategy.py +0 -31
- agno/document/reader/__init__.py +0 -1
- agno/document/reader/arxiv_reader.py +0 -41
- agno/document/reader/base.py +0 -22
- agno/document/reader/csv_reader.py +0 -84
- agno/document/reader/docx_reader.py +0 -46
- agno/document/reader/firecrawl_reader.py +0 -99
- agno/document/reader/json_reader.py +0 -43
- agno/document/reader/pdf_reader.py +0 -219
- agno/document/reader/s3/pdf_reader.py +0 -46
- agno/document/reader/s3/text_reader.py +0 -51
- agno/document/reader/text_reader.py +0 -41
- agno/document/reader/website_reader.py +0 -175
- agno/document/reader/youtube_reader.py +0 -50
- agno/embedder/__init__.py +0 -1
- agno/embedder/azure_openai.py +0 -86
- agno/embedder/cohere.py +0 -72
- agno/embedder/fastembed.py +0 -37
- agno/embedder/google.py +0 -73
- agno/embedder/huggingface.py +0 -54
- agno/embedder/mistral.py +0 -80
- agno/embedder/ollama.py +0 -57
- agno/embedder/openai.py +0 -74
- agno/embedder/sentence_transformer.py +0 -38
- agno/embedder/voyageai.py +0 -64
- agno/eval/perf.py +0 -201
- agno/file/__init__.py +0 -1
- agno/file/file.py +0 -16
- agno/file/local/csv.py +0 -32
- agno/file/local/txt.py +0 -19
- agno/infra/app.py +0 -240
- agno/infra/base.py +0 -144
- agno/infra/context.py +0 -20
- agno/infra/db_app.py +0 -52
- agno/infra/resource.py +0 -205
- agno/infra/resources.py +0 -55
- agno/knowledge/agent.py +0 -230
- agno/knowledge/arxiv.py +0 -22
- agno/knowledge/combined.py +0 -22
- agno/knowledge/csv.py +0 -28
- agno/knowledge/csv_url.py +0 -19
- agno/knowledge/document.py +0 -20
- agno/knowledge/docx.py +0 -30
- agno/knowledge/json.py +0 -28
- agno/knowledge/langchain.py +0 -71
- agno/knowledge/llamaindex.py +0 -66
- agno/knowledge/pdf.py +0 -28
- agno/knowledge/pdf_url.py +0 -26
- agno/knowledge/s3/base.py +0 -60
- agno/knowledge/s3/pdf.py +0 -21
- agno/knowledge/s3/text.py +0 -23
- agno/knowledge/text.py +0 -30
- agno/knowledge/website.py +0 -88
- agno/knowledge/wikipedia.py +0 -31
- agno/knowledge/youtube.py +0 -22
- agno/memory/agent.py +0 -392
- agno/memory/classifier.py +0 -104
- agno/memory/db/__init__.py +0 -1
- agno/memory/db/base.py +0 -42
- agno/memory/db/mongodb.py +0 -189
- agno/memory/db/postgres.py +0 -203
- agno/memory/db/sqlite.py +0 -193
- agno/memory/memory.py +0 -15
- agno/memory/row.py +0 -36
- agno/memory/summarizer.py +0 -192
- agno/memory/summary.py +0 -19
- agno/memory/workflow.py +0 -38
- agno/models/google/gemini_openai.py +0 -26
- agno/models/ollama/hermes.py +0 -221
- agno/models/ollama/tools.py +0 -362
- agno/models/vertexai/gemini.py +0 -595
- agno/playground/__init__.py +0 -3
- agno/playground/async_router.py +0 -421
- agno/playground/deploy.py +0 -249
- agno/playground/operator.py +0 -92
- agno/playground/playground.py +0 -91
- agno/playground/schemas.py +0 -76
- agno/playground/serve.py +0 -55
- agno/playground/sync_router.py +0 -405
- agno/reasoning/agent.py +0 -68
- agno/run/response.py +0 -112
- agno/storage/agent/__init__.py +0 -0
- agno/storage/agent/base.py +0 -38
- agno/storage/agent/dynamodb.py +0 -350
- agno/storage/agent/json.py +0 -92
- agno/storage/agent/mongodb.py +0 -228
- agno/storage/agent/postgres.py +0 -367
- agno/storage/agent/session.py +0 -79
- agno/storage/agent/singlestore.py +0 -303
- agno/storage/agent/sqlite.py +0 -357
- agno/storage/agent/yaml.py +0 -93
- agno/storage/workflow/__init__.py +0 -0
- agno/storage/workflow/base.py +0 -40
- agno/storage/workflow/mongodb.py +0 -233
- agno/storage/workflow/postgres.py +0 -366
- agno/storage/workflow/session.py +0 -60
- agno/storage/workflow/sqlite.py +0 -359
- agno/tools/googlesearch.py +0 -88
- agno/utils/defaults.py +0 -57
- agno/utils/filesystem.py +0 -39
- agno/utils/git.py +0 -52
- agno/utils/json_io.py +0 -30
- agno/utils/load_env.py +0 -19
- agno/utils/py_io.py +0 -19
- agno/utils/pyproject.py +0 -18
- agno/utils/resource_filter.py +0 -31
- agno/vectordb/singlestore/s2vectordb.py +0 -390
- agno/vectordb/singlestore/s2vectordb2.py +0 -355
- agno/workspace/__init__.py +0 -0
- agno/workspace/config.py +0 -325
- agno/workspace/enums.py +0 -6
- agno/workspace/helpers.py +0 -48
- agno/workspace/operator.py +0 -758
- agno/workspace/settings.py +0 -63
- agno-0.1.2.dist-info/LICENSE +0 -375
- agno-0.1.2.dist-info/METADATA +0 -502
- agno-0.1.2.dist-info/RECORD +0 -352
- agno-0.1.2.dist-info/entry_points.txt +0 -3
- /agno/{cli → db/migrations}/__init__.py +0 -0
- /agno/{cli/ws → db/migrations/versions}/__init__.py +0 -0
- /agno/{document/chunking/__init__.py → db/schemas/metrics.py} +0 -0
- /agno/{document/reader/s3 → integrations}/__init__.py +0 -0
- /agno/{file/local → knowledge/chunking}/__init__.py +0 -0
- /agno/{infra → knowledge/remote_content}/__init__.py +0 -0
- /agno/{knowledge/s3 → tools/models}/__init__.py +0 -0
- /agno/{reranker → utils/models}/__init__.py +0 -0
- /agno/{storage → utils/print_response}/__init__.py +0 -0
- {agno-0.1.2.dist-info → agno-2.3.13.dist-info}/top_level.txt +0 -0
agno/db/postgres/postgres.py
@@ -0,0 +1,2870 @@
|
|
|
1
|
+
import time
|
|
2
|
+
from datetime import date, datetime, timedelta, timezone
|
|
3
|
+
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Tuple, Union
|
|
4
|
+
from uuid import uuid4
|
|
5
|
+
|
|
6
|
+
if TYPE_CHECKING:
|
|
7
|
+
from agno.tracing.schemas import Span, Trace
|
|
8
|
+
|
|
9
|
+
from agno.db.base import BaseDb, SessionType
|
|
10
|
+
from agno.db.migrations.manager import MigrationManager
|
|
11
|
+
from agno.db.postgres.schemas import get_table_schema_definition
|
|
12
|
+
from agno.db.postgres.utils import (
|
|
13
|
+
apply_sorting,
|
|
14
|
+
bulk_upsert_metrics,
|
|
15
|
+
calculate_date_metrics,
|
|
16
|
+
create_schema,
|
|
17
|
+
deserialize_cultural_knowledge,
|
|
18
|
+
fetch_all_sessions_data,
|
|
19
|
+
get_dates_to_calculate_metrics_for,
|
|
20
|
+
is_table_available,
|
|
21
|
+
is_valid_table,
|
|
22
|
+
serialize_cultural_knowledge,
|
|
23
|
+
)
|
|
24
|
+
from agno.db.schemas.culture import CulturalKnowledge
|
|
25
|
+
from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
|
|
26
|
+
from agno.db.schemas.knowledge import KnowledgeRow
|
|
27
|
+
from agno.db.schemas.memory import UserMemory
|
|
28
|
+
from agno.session import AgentSession, Session, TeamSession, WorkflowSession
|
|
29
|
+
from agno.utils.log import log_debug, log_error, log_info, log_warning
|
|
30
|
+
from agno.utils.string import generate_id
|
|
31
|
+
|
|
32
|
+
try:
|
|
33
|
+
from sqlalchemy import ForeignKey, Index, String, UniqueConstraint, and_, case, func, or_, select, update
|
|
34
|
+
from sqlalchemy.dialects import postgresql
|
|
35
|
+
from sqlalchemy.dialects.postgresql import TIMESTAMP
|
|
36
|
+
from sqlalchemy.engine import Engine, create_engine
|
|
37
|
+
from sqlalchemy.exc import ProgrammingError
|
|
38
|
+
from sqlalchemy.orm import scoped_session, sessionmaker
|
|
39
|
+
from sqlalchemy.schema import Column, MetaData, Table
|
|
40
|
+
from sqlalchemy.sql.expression import text
|
|
41
|
+
except ImportError:
|
|
42
|
+
raise ImportError("`sqlalchemy` not installed. Please install it using `pip install sqlalchemy`")
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
class PostgresDb(BaseDb):
|
|
46
|
+
def __init__(
|
|
47
|
+
self,
|
|
48
|
+
db_url: Optional[str] = None,
|
|
49
|
+
db_engine: Optional[Engine] = None,
|
|
50
|
+
db_schema: Optional[str] = None,
|
|
51
|
+
session_table: Optional[str] = None,
|
|
52
|
+
culture_table: Optional[str] = None,
|
|
53
|
+
memory_table: Optional[str] = None,
|
|
54
|
+
metrics_table: Optional[str] = None,
|
|
55
|
+
eval_table: Optional[str] = None,
|
|
56
|
+
knowledge_table: Optional[str] = None,
|
|
57
|
+
traces_table: Optional[str] = None,
|
|
58
|
+
spans_table: Optional[str] = None,
|
|
59
|
+
versions_table: Optional[str] = None,
|
|
60
|
+
id: Optional[str] = None,
|
|
61
|
+
create_schema: bool = True,
|
|
62
|
+
):
|
|
63
|
+
"""
|
|
64
|
+
Interface for interacting with a PostgreSQL database.
|
|
65
|
+
|
|
66
|
+
The following order is used to determine the database connection:
|
|
67
|
+
1. Use the db_engine if provided
|
|
68
|
+
2. Use the db_url
|
|
69
|
+
3. Raise an error if neither is provided
|
|
70
|
+
|
|
71
|
+
Args:
|
|
72
|
+
db_url (Optional[str]): The database URL to connect to.
|
|
73
|
+
db_engine (Optional[Engine]): The SQLAlchemy database engine to use.
|
|
74
|
+
db_schema (Optional[str]): The database schema to use.
|
|
75
|
+
session_table (Optional[str]): Name of the table to store Agent, Team and Workflow sessions.
|
|
76
|
+
memory_table (Optional[str]): Name of the table to store memories.
|
|
77
|
+
metrics_table (Optional[str]): Name of the table to store metrics.
|
|
78
|
+
eval_table (Optional[str]): Name of the table to store evaluation runs data.
|
|
79
|
+
knowledge_table (Optional[str]): Name of the table to store knowledge content.
|
|
80
|
+
culture_table (Optional[str]): Name of the table to store cultural knowledge.
|
|
81
|
+
traces_table (Optional[str]): Name of the table to store run traces.
|
|
82
|
+
spans_table (Optional[str]): Name of the table to store span events.
|
|
83
|
+
versions_table (Optional[str]): Name of the table to store schema versions.
|
|
84
|
+
id (Optional[str]): ID of the database.
|
|
85
|
+
create_schema (bool): Whether to automatically create the database schema if it doesn't exist.
|
|
86
|
+
Set to False if schema is managed externally (e.g., via migrations). Defaults to True.
|
|
87
|
+
|
|
88
|
+
Raises:
|
|
89
|
+
ValueError: If neither db_url nor db_engine is provided.
|
|
90
|
+
ValueError: If none of the tables are provided.
|
|
91
|
+
"""
|
|
92
|
+
_engine: Optional[Engine] = db_engine
|
|
93
|
+
if _engine is None and db_url is not None:
|
|
94
|
+
_engine = create_engine(db_url)
|
|
95
|
+
if _engine is None:
|
|
96
|
+
raise ValueError("One of db_url or db_engine must be provided")
|
|
97
|
+
|
|
98
|
+
self.db_url: Optional[str] = db_url
|
|
99
|
+
self.db_engine: Engine = _engine
|
|
100
|
+
|
|
101
|
+
if id is None:
|
|
102
|
+
base_seed = db_url or str(db_engine.url) # type: ignore
|
|
103
|
+
schema_suffix = db_schema if db_schema is not None else "ai"
|
|
104
|
+
seed = f"{base_seed}#{schema_suffix}"
|
|
105
|
+
id = generate_id(seed)
|
|
106
|
+
|
|
107
|
+
super().__init__(
|
|
108
|
+
id=id,
|
|
109
|
+
session_table=session_table,
|
|
110
|
+
memory_table=memory_table,
|
|
111
|
+
metrics_table=metrics_table,
|
|
112
|
+
eval_table=eval_table,
|
|
113
|
+
knowledge_table=knowledge_table,
|
|
114
|
+
culture_table=culture_table,
|
|
115
|
+
traces_table=traces_table,
|
|
116
|
+
spans_table=spans_table,
|
|
117
|
+
versions_table=versions_table,
|
|
118
|
+
)
|
|
119
|
+
|
|
120
|
+
self.db_schema: str = db_schema if db_schema is not None else "ai"
|
|
121
|
+
self.metadata: MetaData = MetaData(schema=self.db_schema)
|
|
122
|
+
self.create_schema: bool = create_schema
|
|
123
|
+
|
|
124
|
+
# Initialize database session
|
|
125
|
+
self.Session: scoped_session = scoped_session(sessionmaker(bind=self.db_engine, expire_on_commit=False))
|
|
126
|
+
|
|
127
|
+
# -- DB methods --
|
|
128
|
+
def table_exists(self, table_name: str) -> bool:
|
|
129
|
+
"""Check if a table with the given name exists in the Postgres database.
|
|
130
|
+
|
|
131
|
+
Args:
|
|
132
|
+
table_name: Name of the table to check
|
|
133
|
+
|
|
134
|
+
Returns:
|
|
135
|
+
bool: True if the table exists in the database, False otherwise
|
|
136
|
+
"""
|
|
137
|
+
with self.Session() as sess:
|
|
138
|
+
return is_table_available(session=sess, table_name=table_name, db_schema=self.db_schema)
|
|
139
|
+
|
|
140
|
+
def _create_all_tables(self):
|
|
141
|
+
"""Create all tables for the database."""
|
|
142
|
+
tables_to_create = [
|
|
143
|
+
(self.session_table_name, "sessions"),
|
|
144
|
+
(self.memory_table_name, "memories"),
|
|
145
|
+
(self.metrics_table_name, "metrics"),
|
|
146
|
+
(self.eval_table_name, "evals"),
|
|
147
|
+
(self.knowledge_table_name, "knowledge"),
|
|
148
|
+
(self.versions_table_name, "versions"),
|
|
149
|
+
]
|
|
150
|
+
|
|
151
|
+
for table_name, table_type in tables_to_create:
|
|
152
|
+
self._get_or_create_table(table_name=table_name, table_type=table_type, create_table_if_not_found=True)
|
|
153
|
+
|
|
154
|
+
def _create_table(self, table_name: str, table_type: str) -> Table:
|
|
155
|
+
"""
|
|
156
|
+
Create a table with the appropriate schema based on the table type.
|
|
157
|
+
|
|
158
|
+
Args:
|
|
159
|
+
table_name (str): Name of the table to create
|
|
160
|
+
table_type (str): Type of table (used to get schema definition)
|
|
161
|
+
|
|
162
|
+
Returns:
|
|
163
|
+
Table: SQLAlchemy Table object
|
|
164
|
+
"""
|
|
165
|
+
try:
|
|
166
|
+
table_schema = get_table_schema_definition(table_type).copy()
|
|
167
|
+
|
|
168
|
+
columns: List[Column] = []
|
|
169
|
+
indexes: List[str] = []
|
|
170
|
+
unique_constraints: List[str] = []
|
|
171
|
+
schema_unique_constraints = table_schema.pop("_unique_constraints", [])
|
|
172
|
+
|
|
173
|
+
# Get the columns, indexes, and unique constraints from the table schema
|
|
174
|
+
for col_name, col_config in table_schema.items():
|
|
175
|
+
column_args = [col_name, col_config["type"]()]
|
|
176
|
+
column_kwargs = {}
|
|
177
|
+
if col_config.get("primary_key", False):
|
|
178
|
+
column_kwargs["primary_key"] = True
|
|
179
|
+
if "nullable" in col_config:
|
|
180
|
+
column_kwargs["nullable"] = col_config["nullable"]
|
|
181
|
+
if col_config.get("index", False):
|
|
182
|
+
indexes.append(col_name)
|
|
183
|
+
if col_config.get("unique", False):
|
|
184
|
+
column_kwargs["unique"] = True
|
|
185
|
+
unique_constraints.append(col_name)
|
|
186
|
+
|
|
187
|
+
# Handle foreign key constraint
|
|
188
|
+
if "foreign_key" in col_config:
|
|
189
|
+
fk_ref = col_config["foreign_key"]
|
|
190
|
+
# For spans table, dynamically replace the traces table reference
|
|
191
|
+
# with the actual trace table name configured for this db instance
|
|
192
|
+
if table_type == "spans" and "trace_id" in fk_ref:
|
|
193
|
+
fk_ref = f"{self.db_schema}.{self.trace_table_name}.trace_id"
|
|
194
|
+
column_args.append(ForeignKey(fk_ref))
|
|
195
|
+
|
|
196
|
+
columns.append(Column(*column_args, **column_kwargs)) # type: ignore
|
|
197
|
+
|
|
198
|
+
# Create the table object
|
|
199
|
+
table = Table(table_name, self.metadata, *columns, schema=self.db_schema)
|
|
200
|
+
|
|
201
|
+
# Add multi-column unique constraints with table-specific names
|
|
202
|
+
for constraint in schema_unique_constraints:
|
|
203
|
+
constraint_name = f"{table_name}_{constraint['name']}"
|
|
204
|
+
constraint_columns = constraint["columns"]
|
|
205
|
+
table.append_constraint(UniqueConstraint(*constraint_columns, name=constraint_name))
|
|
206
|
+
|
|
207
|
+
# Add indexes to the table definition
|
|
208
|
+
for idx_col in indexes:
|
|
209
|
+
idx_name = f"idx_{table_name}_{idx_col}"
|
|
210
|
+
table.append_constraint(Index(idx_name, idx_col))
|
|
211
|
+
|
|
212
|
+
if self.create_schema:
|
|
213
|
+
with self.Session() as sess, sess.begin():
|
|
214
|
+
create_schema(session=sess, db_schema=self.db_schema)
|
|
215
|
+
|
|
216
|
+
# Create table
|
|
217
|
+
table_created = False
|
|
218
|
+
if not self.table_exists(table_name):
|
|
219
|
+
table.create(self.db_engine, checkfirst=True)
|
|
220
|
+
log_debug(f"Successfully created table '{table_name}'")
|
|
221
|
+
table_created = True
|
|
222
|
+
else:
|
|
223
|
+
log_debug(f"Table {self.db_schema}.{table_name} already exists, skipping creation")
|
|
224
|
+
|
|
225
|
+
# Create indexes
|
|
226
|
+
for idx in table.indexes:
|
|
227
|
+
try:
|
|
228
|
+
# Check if index already exists
|
|
229
|
+
with self.Session() as sess:
|
|
230
|
+
exists_query = text(
|
|
231
|
+
"SELECT 1 FROM pg_indexes WHERE schemaname = :schema AND indexname = :index_name"
|
|
232
|
+
)
|
|
233
|
+
exists = (
|
|
234
|
+
sess.execute(exists_query, {"schema": self.db_schema, "index_name": idx.name}).scalar()
|
|
235
|
+
is not None
|
|
236
|
+
)
|
|
237
|
+
if exists:
|
|
238
|
+
log_debug(
|
|
239
|
+
f"Index {idx.name} already exists in {self.db_schema}.{table_name}, skipping creation"
|
|
240
|
+
)
|
|
241
|
+
continue
|
|
242
|
+
|
|
243
|
+
idx.create(self.db_engine)
|
|
244
|
+
log_debug(f"Created index: {idx.name} for table {self.db_schema}.{table_name}")
|
|
245
|
+
|
|
246
|
+
except Exception as e:
|
|
247
|
+
log_error(f"Error creating index {idx.name}: {e}")
|
|
248
|
+
|
|
249
|
+
# Store the schema version for the created table
|
|
250
|
+
if table_name != self.versions_table_name and table_created:
|
|
251
|
+
latest_schema_version = MigrationManager(self).latest_schema_version
|
|
252
|
+
self.upsert_schema_version(table_name=table_name, version=latest_schema_version.public)
|
|
253
|
+
return table
|
|
254
|
+
|
|
255
|
+
except Exception as e:
|
|
256
|
+
log_error(f"Could not create table {self.db_schema}.{table_name}: {e}")
|
|
257
|
+
raise
|
|
258
|
+
|
|
259
|
+
def _get_table(self, table_type: str, create_table_if_not_found: Optional[bool] = False) -> Optional[Table]:
|
|
260
|
+
if table_type == "sessions":
|
|
261
|
+
self.session_table = self._get_or_create_table(
|
|
262
|
+
table_name=self.session_table_name,
|
|
263
|
+
table_type="sessions",
|
|
264
|
+
create_table_if_not_found=create_table_if_not_found,
|
|
265
|
+
)
|
|
266
|
+
return self.session_table
|
|
267
|
+
|
|
268
|
+
if table_type == "memories":
|
|
269
|
+
self.memory_table = self._get_or_create_table(
|
|
270
|
+
table_name=self.memory_table_name,
|
|
271
|
+
table_type="memories",
|
|
272
|
+
create_table_if_not_found=create_table_if_not_found,
|
|
273
|
+
)
|
|
274
|
+
return self.memory_table
|
|
275
|
+
|
|
276
|
+
if table_type == "metrics":
|
|
277
|
+
self.metrics_table = self._get_or_create_table(
|
|
278
|
+
table_name=self.metrics_table_name,
|
|
279
|
+
table_type="metrics",
|
|
280
|
+
create_table_if_not_found=create_table_if_not_found,
|
|
281
|
+
)
|
|
282
|
+
return self.metrics_table
|
|
283
|
+
|
|
284
|
+
if table_type == "evals":
|
|
285
|
+
self.eval_table = self._get_or_create_table(
|
|
286
|
+
table_name=self.eval_table_name,
|
|
287
|
+
table_type="evals",
|
|
288
|
+
create_table_if_not_found=create_table_if_not_found,
|
|
289
|
+
)
|
|
290
|
+
return self.eval_table
|
|
291
|
+
|
|
292
|
+
if table_type == "knowledge":
|
|
293
|
+
self.knowledge_table = self._get_or_create_table(
|
|
294
|
+
table_name=self.knowledge_table_name,
|
|
295
|
+
table_type="knowledge",
|
|
296
|
+
create_table_if_not_found=create_table_if_not_found,
|
|
297
|
+
)
|
|
298
|
+
return self.knowledge_table
|
|
299
|
+
|
|
300
|
+
if table_type == "culture":
|
|
301
|
+
self.culture_table = self._get_or_create_table(
|
|
302
|
+
table_name=self.culture_table_name,
|
|
303
|
+
table_type="culture",
|
|
304
|
+
create_table_if_not_found=create_table_if_not_found,
|
|
305
|
+
)
|
|
306
|
+
return self.culture_table
|
|
307
|
+
|
|
308
|
+
if table_type == "versions":
|
|
309
|
+
self.versions_table = self._get_or_create_table(
|
|
310
|
+
table_name=self.versions_table_name,
|
|
311
|
+
table_type="versions",
|
|
312
|
+
create_table_if_not_found=create_table_if_not_found,
|
|
313
|
+
)
|
|
314
|
+
return self.versions_table
|
|
315
|
+
|
|
316
|
+
if table_type == "traces":
|
|
317
|
+
self.traces_table = self._get_or_create_table(
|
|
318
|
+
table_name=self.trace_table_name,
|
|
319
|
+
table_type="traces",
|
|
320
|
+
create_table_if_not_found=create_table_if_not_found,
|
|
321
|
+
)
|
|
322
|
+
return self.traces_table
|
|
323
|
+
|
|
324
|
+
if table_type == "spans":
|
|
325
|
+
# Ensure traces table exists first (spans has FK to traces)
|
|
326
|
+
if create_table_if_not_found:
|
|
327
|
+
self._get_table(table_type="traces", create_table_if_not_found=True)
|
|
328
|
+
|
|
329
|
+
self.spans_table = self._get_or_create_table(
|
|
330
|
+
table_name=self.span_table_name,
|
|
331
|
+
table_type="spans",
|
|
332
|
+
create_table_if_not_found=create_table_if_not_found,
|
|
333
|
+
)
|
|
334
|
+
return self.spans_table
|
|
335
|
+
|
|
336
|
+
raise ValueError(f"Unknown table type: {table_type}")
|
|
337
|
+
|
|
338
|
+
def _get_or_create_table(
|
|
339
|
+
self, table_name: str, table_type: str, create_table_if_not_found: Optional[bool] = False
|
|
340
|
+
) -> Optional[Table]:
|
|
341
|
+
"""
|
|
342
|
+
Check if the table exists and is valid, else create it.
|
|
343
|
+
|
|
344
|
+
Args:
|
|
345
|
+
table_name (str): Name of the table to get or create
|
|
346
|
+
table_type (str): Type of table (used to get schema definition)
|
|
347
|
+
|
|
348
|
+
Returns:
|
|
349
|
+
Optional[Table]: SQLAlchemy Table object representing the schema.
|
|
350
|
+
"""
|
|
351
|
+
|
|
352
|
+
with self.Session() as sess, sess.begin():
|
|
353
|
+
table_is_available = is_table_available(session=sess, table_name=table_name, db_schema=self.db_schema)
|
|
354
|
+
|
|
355
|
+
if not table_is_available:
|
|
356
|
+
if not create_table_if_not_found:
|
|
357
|
+
return None
|
|
358
|
+
return self._create_table(table_name=table_name, table_type=table_type)
|
|
359
|
+
|
|
360
|
+
if not is_valid_table(
|
|
361
|
+
db_engine=self.db_engine,
|
|
362
|
+
table_name=table_name,
|
|
363
|
+
table_type=table_type,
|
|
364
|
+
db_schema=self.db_schema,
|
|
365
|
+
):
|
|
366
|
+
raise ValueError(f"Table {self.db_schema}.{table_name} has an invalid schema")
|
|
367
|
+
|
|
368
|
+
try:
|
|
369
|
+
table = Table(table_name, self.metadata, schema=self.db_schema, autoload_with=self.db_engine)
|
|
370
|
+
return table
|
|
371
|
+
|
|
372
|
+
except Exception as e:
|
|
373
|
+
log_error(f"Error loading existing table {self.db_schema}.{table_name}: {e}")
|
|
374
|
+
raise
|
|
375
|
+
|
|
376
|
+
def get_latest_schema_version(self, table_name: str):
|
|
377
|
+
"""Get the latest version of the database schema."""
|
|
378
|
+
table = self._get_table(table_type="versions", create_table_if_not_found=True)
|
|
379
|
+
if table is None:
|
|
380
|
+
return "2.0.0"
|
|
381
|
+
with self.Session() as sess:
|
|
382
|
+
stmt = select(table)
|
|
383
|
+
# Latest version for the given table
|
|
384
|
+
stmt = stmt.where(table.c.table_name == table_name)
|
|
385
|
+
stmt = stmt.order_by(table.c.version.desc()).limit(1)
|
|
386
|
+
result = sess.execute(stmt).fetchone()
|
|
387
|
+
if result is None:
|
|
388
|
+
return "2.0.0"
|
|
389
|
+
version_dict = dict(result._mapping)
|
|
390
|
+
return version_dict.get("version") or "2.0.0"
|
|
391
|
+
|
|
392
|
+
def upsert_schema_version(self, table_name: str, version: str) -> None:
|
|
393
|
+
"""Upsert the schema version into the database."""
|
|
394
|
+
table = self._get_table(table_type="versions", create_table_if_not_found=True)
|
|
395
|
+
if table is None:
|
|
396
|
+
return
|
|
397
|
+
current_datetime = datetime.now().isoformat()
|
|
398
|
+
with self.Session() as sess, sess.begin():
|
|
399
|
+
stmt = postgresql.insert(table).values(
|
|
400
|
+
table_name=table_name,
|
|
401
|
+
version=version,
|
|
402
|
+
created_at=current_datetime, # Store as ISO format string
|
|
403
|
+
updated_at=current_datetime,
|
|
404
|
+
)
|
|
405
|
+
# Update version if table_name already exists
|
|
406
|
+
stmt = stmt.on_conflict_do_update(
|
|
407
|
+
index_elements=["table_name"],
|
|
408
|
+
set_=dict(version=version, updated_at=current_datetime),
|
|
409
|
+
)
|
|
410
|
+
sess.execute(stmt)
|
|
411
|
+
|
|
412
|
+
# -- Session methods --
|
|
413
|
+
def delete_session(self, session_id: str) -> bool:
|
|
414
|
+
"""
|
|
415
|
+
Delete a session from the database.
|
|
416
|
+
|
|
417
|
+
Args:
|
|
418
|
+
session_id (str): ID of the session to delete
|
|
419
|
+
|
|
420
|
+
Returns:
|
|
421
|
+
bool: True if the session was deleted, False otherwise.
|
|
422
|
+
|
|
423
|
+
Raises:
|
|
424
|
+
Exception: If an error occurs during deletion.
|
|
425
|
+
"""
|
|
426
|
+
try:
|
|
427
|
+
table = self._get_table(table_type="sessions")
|
|
428
|
+
if table is None:
|
|
429
|
+
return False
|
|
430
|
+
|
|
431
|
+
with self.Session() as sess, sess.begin():
|
|
432
|
+
delete_stmt = table.delete().where(table.c.session_id == session_id)
|
|
433
|
+
result = sess.execute(delete_stmt)
|
|
434
|
+
|
|
435
|
+
if result.rowcount == 0:
|
|
436
|
+
log_debug(f"No session found to delete with session_id: {session_id} in table {table.name}")
|
|
437
|
+
return False
|
|
438
|
+
|
|
439
|
+
else:
|
|
440
|
+
log_debug(f"Successfully deleted session with session_id: {session_id} in table {table.name}")
|
|
441
|
+
return True
|
|
442
|
+
|
|
443
|
+
except Exception as e:
|
|
444
|
+
log_error(f"Error deleting session: {e}")
|
|
445
|
+
raise e
|
|
446
|
+
|
|
447
|
+
def delete_sessions(self, session_ids: List[str]) -> None:
|
|
448
|
+
"""Delete all given sessions from the database.
|
|
449
|
+
Can handle multiple session types in the same run.
|
|
450
|
+
|
|
451
|
+
Args:
|
|
452
|
+
session_ids (List[str]): The IDs of the sessions to delete.
|
|
453
|
+
|
|
454
|
+
Raises:
|
|
455
|
+
Exception: If an error occurs during deletion.
|
|
456
|
+
"""
|
|
457
|
+
try:
|
|
458
|
+
table = self._get_table(table_type="sessions")
|
|
459
|
+
if table is None:
|
|
460
|
+
return
|
|
461
|
+
|
|
462
|
+
with self.Session() as sess, sess.begin():
|
|
463
|
+
delete_stmt = table.delete().where(table.c.session_id.in_(session_ids))
|
|
464
|
+
result = sess.execute(delete_stmt)
|
|
465
|
+
|
|
466
|
+
log_debug(f"Successfully deleted {result.rowcount} sessions")
|
|
467
|
+
|
|
468
|
+
except Exception as e:
|
|
469
|
+
log_error(f"Error deleting sessions: {e}")
|
|
470
|
+
raise e
|
|
471
|
+
|
|
472
|
+
def get_session(
|
|
473
|
+
self,
|
|
474
|
+
session_id: str,
|
|
475
|
+
session_type: SessionType,
|
|
476
|
+
user_id: Optional[str] = None,
|
|
477
|
+
deserialize: Optional[bool] = True,
|
|
478
|
+
) -> Optional[Union[Session, Dict[str, Any]]]:
|
|
479
|
+
"""
|
|
480
|
+
Read a session from the database.
|
|
481
|
+
|
|
482
|
+
Args:
|
|
483
|
+
session_id (str): ID of the session to read.
|
|
484
|
+
session_type (SessionType): Type of session to get.
|
|
485
|
+
user_id (Optional[str]): User ID to filter by. Defaults to None.
|
|
486
|
+
deserialize (Optional[bool]): Whether to serialize the session. Defaults to True.
|
|
487
|
+
|
|
488
|
+
Returns:
|
|
489
|
+
Union[Session, Dict[str, Any], None]:
|
|
490
|
+
- When deserialize=True: Session object
|
|
491
|
+
- When deserialize=False: Session dictionary
|
|
492
|
+
|
|
493
|
+
Raises:
|
|
494
|
+
Exception: If an error occurs during retrieval.
|
|
495
|
+
"""
|
|
496
|
+
try:
|
|
497
|
+
table = self._get_table(table_type="sessions")
|
|
498
|
+
if table is None:
|
|
499
|
+
return None
|
|
500
|
+
|
|
501
|
+
with self.Session() as sess:
|
|
502
|
+
stmt = select(table).where(table.c.session_id == session_id)
|
|
503
|
+
|
|
504
|
+
if user_id is not None:
|
|
505
|
+
stmt = stmt.where(table.c.user_id == user_id)
|
|
506
|
+
result = sess.execute(stmt).fetchone()
|
|
507
|
+
if result is None:
|
|
508
|
+
return None
|
|
509
|
+
|
|
510
|
+
session = dict(result._mapping)
|
|
511
|
+
|
|
512
|
+
if not deserialize:
|
|
513
|
+
return session
|
|
514
|
+
|
|
515
|
+
if session_type == SessionType.AGENT:
|
|
516
|
+
return AgentSession.from_dict(session)
|
|
517
|
+
elif session_type == SessionType.TEAM:
|
|
518
|
+
return TeamSession.from_dict(session)
|
|
519
|
+
elif session_type == SessionType.WORKFLOW:
|
|
520
|
+
return WorkflowSession.from_dict(session)
|
|
521
|
+
else:
|
|
522
|
+
raise ValueError(f"Invalid session type: {session_type}")
|
|
523
|
+
|
|
524
|
+
except Exception as e:
|
|
525
|
+
log_error(f"Exception reading from session table: {e}")
|
|
526
|
+
raise e
|
|
527
|
+
|
|
528
|
+
def get_sessions(
|
|
529
|
+
self,
|
|
530
|
+
session_type: Optional[SessionType] = None,
|
|
531
|
+
user_id: Optional[str] = None,
|
|
532
|
+
component_id: Optional[str] = None,
|
|
533
|
+
session_name: Optional[str] = None,
|
|
534
|
+
start_timestamp: Optional[int] = None,
|
|
535
|
+
end_timestamp: Optional[int] = None,
|
|
536
|
+
limit: Optional[int] = None,
|
|
537
|
+
page: Optional[int] = None,
|
|
538
|
+
sort_by: Optional[str] = None,
|
|
539
|
+
sort_order: Optional[str] = None,
|
|
540
|
+
deserialize: Optional[bool] = True,
|
|
541
|
+
) -> Union[List[Session], Tuple[List[Dict[str, Any]], int]]:
|
|
542
|
+
"""
|
|
543
|
+
Get all sessions in the given table. Can filter by user_id and entity_id.
|
|
544
|
+
|
|
545
|
+
Args:
|
|
546
|
+
session_type (Optional[SessionType]): The type of session to get.
|
|
547
|
+
user_id (Optional[str]): The ID of the user to filter by.
|
|
548
|
+
entity_id (Optional[str]): The ID of the agent / workflow to filter by.
|
|
549
|
+
start_timestamp (Optional[int]): The start timestamp to filter by.
|
|
550
|
+
end_timestamp (Optional[int]): The end timestamp to filter by.
|
|
551
|
+
session_name (Optional[str]): The name of the session to filter by.
|
|
552
|
+
limit (Optional[int]): The maximum number of sessions to return. Defaults to None.
|
|
553
|
+
page (Optional[int]): The page number to return. Defaults to None.
|
|
554
|
+
sort_by (Optional[str]): The field to sort by. Defaults to None.
|
|
555
|
+
sort_order (Optional[str]): The sort order. Defaults to None.
|
|
556
|
+
deserialize (Optional[bool]): Whether to serialize the sessions. Defaults to True.
|
|
557
|
+
|
|
558
|
+
Returns:
|
|
559
|
+
Union[List[Session], Tuple[List[Dict], int]]:
|
|
560
|
+
- When deserialize=True: List of Session objects
|
|
561
|
+
- When deserialize=False: Tuple of (session dictionaries, total count)
|
|
562
|
+
|
|
563
|
+
Raises:
|
|
564
|
+
Exception: If an error occurs during retrieval.
|
|
565
|
+
"""
|
|
566
|
+
try:
|
|
567
|
+
table = self._get_table(table_type="sessions")
|
|
568
|
+
if table is None:
|
|
569
|
+
return [] if deserialize else ([], 0)
|
|
570
|
+
|
|
571
|
+
with self.Session() as sess, sess.begin():
|
|
572
|
+
stmt = select(table)
|
|
573
|
+
|
|
574
|
+
# Filtering
|
|
575
|
+
if user_id is not None:
|
|
576
|
+
stmt = stmt.where(table.c.user_id == user_id)
|
|
577
|
+
if component_id is not None:
|
|
578
|
+
if session_type == SessionType.AGENT:
|
|
579
|
+
stmt = stmt.where(table.c.agent_id == component_id)
|
|
580
|
+
elif session_type == SessionType.TEAM:
|
|
581
|
+
stmt = stmt.where(table.c.team_id == component_id)
|
|
582
|
+
elif session_type == SessionType.WORKFLOW:
|
|
583
|
+
stmt = stmt.where(table.c.workflow_id == component_id)
|
|
584
|
+
if start_timestamp is not None:
|
|
585
|
+
stmt = stmt.where(table.c.created_at >= start_timestamp)
|
|
586
|
+
if end_timestamp is not None:
|
|
587
|
+
stmt = stmt.where(table.c.created_at <= end_timestamp)
|
|
588
|
+
if session_name is not None:
|
|
589
|
+
stmt = stmt.where(
|
|
590
|
+
func.coalesce(func.json_extract_path_text(table.c.session_data, "session_name"), "").ilike(
|
|
591
|
+
f"%{session_name}%"
|
|
592
|
+
)
|
|
593
|
+
)
|
|
594
|
+
if session_type is not None:
|
|
595
|
+
session_type_value = session_type.value if isinstance(session_type, SessionType) else session_type
|
|
596
|
+
stmt = stmt.where(table.c.session_type == session_type_value)
|
|
597
|
+
|
|
598
|
+
count_stmt = select(func.count()).select_from(stmt.alias())
|
|
599
|
+
total_count = sess.execute(count_stmt).scalar()
|
|
600
|
+
|
|
601
|
+
# Sorting
|
|
602
|
+
stmt = apply_sorting(stmt, table, sort_by, sort_order)
|
|
603
|
+
|
|
604
|
+
# Paginating
|
|
605
|
+
if limit is not None:
|
|
606
|
+
stmt = stmt.limit(limit)
|
|
607
|
+
if page is not None:
|
|
608
|
+
stmt = stmt.offset((page - 1) * limit)
|
|
609
|
+
|
|
610
|
+
records = sess.execute(stmt).fetchall()
|
|
611
|
+
if records is None:
|
|
612
|
+
return [], 0
|
|
613
|
+
|
|
614
|
+
session = [dict(record._mapping) for record in records]
|
|
615
|
+
if not deserialize:
|
|
616
|
+
return session, total_count
|
|
617
|
+
|
|
618
|
+
if session_type == SessionType.AGENT:
|
|
619
|
+
return [AgentSession.from_dict(record) for record in session] # type: ignore
|
|
620
|
+
elif session_type == SessionType.TEAM:
|
|
621
|
+
return [TeamSession.from_dict(record) for record in session] # type: ignore
|
|
622
|
+
elif session_type == SessionType.WORKFLOW:
|
|
623
|
+
return [WorkflowSession.from_dict(record) for record in session] # type: ignore
|
|
624
|
+
else:
|
|
625
|
+
raise ValueError(f"Invalid session type: {session_type}")
|
|
626
|
+
|
|
627
|
+
except Exception as e:
|
|
628
|
+
log_error(f"Exception reading from session table: {e}")
|
|
629
|
+
raise e
|
|
630
|
+
|
|
+    def rename_session(
+        self, session_id: str, session_type: SessionType, session_name: str, deserialize: Optional[bool] = True
+    ) -> Optional[Union[Session, Dict[str, Any]]]:
+        """
+        Rename a session in the database.
+
+        Args:
+            session_id (str): The ID of the session to rename.
+            session_type (SessionType): The type of session to rename.
+            session_name (str): The new name for the session.
+            deserialize (Optional[bool]): Whether to deserialize the session. Defaults to True.
+
+        Returns:
+            Optional[Union[Session, Dict[str, Any]]]:
+                - When deserialize=True: Session object
+                - When deserialize=False: Session dictionary
+
+        Raises:
+            Exception: If an error occurs during renaming.
+        """
+        try:
+            table = self._get_table(table_type="sessions")
+            if table is None:
+                return None
+
+            with self.Session() as sess, sess.begin():
+                stmt = (
+                    update(table)
+                    .where(table.c.session_id == session_id)
+                    .where(table.c.session_type == session_type.value)
+                    .values(
+                        session_data=func.cast(
+                            func.jsonb_set(
+                                func.cast(table.c.session_data, postgresql.JSONB),
+                                text("'{session_name}'"),
+                                func.to_jsonb(session_name),
+                            ),
+                            postgresql.JSON,
+                        )
+                    )
+                    .returning(*table.c)
+                )
+                result = sess.execute(stmt)
+                row = result.fetchone()
+                if not row:
+                    return None
+
+                log_debug(f"Renamed session with id '{session_id}' to '{session_name}'")
+
+                session = dict(row._mapping)
+                if not deserialize:
+                    return session
+
+                # Return the appropriate session type
+                if session_type == SessionType.AGENT:
+                    return AgentSession.from_dict(session)
+                elif session_type == SessionType.TEAM:
+                    return TeamSession.from_dict(session)
+                elif session_type == SessionType.WORKFLOW:
+                    return WorkflowSession.from_dict(session)
+                else:
+                    raise ValueError(f"Invalid session type: {session_type}")
+
+        except Exception as e:
+            log_error(f"Exception renaming session: {e}")
+            raise e
+
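Note: text("'{session_name}'") above is easy to misread as an f-string with a missing f. It is the literal second argument of PostgreSQL's jsonb_set(), i.e. the path '{session_name}' pointing at the top-level session_name key. A compile-only sketch of the same UPDATE shape, against an illustrative table; SQLAlchemy's cast() construct is used here in place of func.cast() and renders equivalent SQL:

from sqlalchemy import Column, MetaData, String, Table, cast, func, text, update
from sqlalchemy.dialects import postgresql

metadata = MetaData()
t = Table("sessions", metadata, Column("session_id", String), Column("session_data", postgresql.JSON))

stmt = (
    update(t)
    .where(t.c.session_id == "abc")
    .values(
        session_data=cast(
            func.jsonb_set(
                cast(t.c.session_data, postgresql.JSONB),
                text("'{session_name}'"),  # jsonb_set() path: the top-level "session_name" key
                func.to_jsonb("new name"),  # the new value, rendered as a bind parameter
            ),
            postgresql.JSON,
        )
    )
)
print(stmt.compile(dialect=postgresql.dialect()))
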
+    def upsert_session(
+        self, session: Session, deserialize: Optional[bool] = True
+    ) -> Optional[Union[Session, Dict[str, Any]]]:
+        """
+        Insert or update a session in the database.
+
+        Args:
+            session (Session): The session data to upsert.
+            deserialize (Optional[bool]): Whether to deserialize the session. Defaults to True.
+
+        Returns:
+            Optional[Union[Session, Dict[str, Any]]]:
+                - When deserialize=True: Session object
+                - When deserialize=False: Session dictionary
+
+        Raises:
+            Exception: If an error occurs during upsert.
+        """
+        try:
+            table = self._get_table(table_type="sessions", create_table_if_not_found=True)
+            if table is None:
+                return None
+
+            session_dict = session.to_dict()
+
+            if isinstance(session, AgentSession):
+                with self.Session() as sess, sess.begin():
+                    stmt = postgresql.insert(table).values(
+                        session_id=session_dict.get("session_id"),
+                        session_type=SessionType.AGENT.value,
+                        agent_id=session_dict.get("agent_id"),
+                        user_id=session_dict.get("user_id"),
+                        runs=session_dict.get("runs"),
+                        agent_data=session_dict.get("agent_data"),
+                        session_data=session_dict.get("session_data"),
+                        summary=session_dict.get("summary"),
+                        metadata=session_dict.get("metadata"),
+                        created_at=session_dict.get("created_at"),
+                        updated_at=session_dict.get("created_at"),
+                    )
+                    stmt = stmt.on_conflict_do_update(  # type: ignore
+                        index_elements=["session_id"],
+                        set_=dict(
+                            agent_id=session_dict.get("agent_id"),
+                            user_id=session_dict.get("user_id"),
+                            agent_data=session_dict.get("agent_data"),
+                            session_data=session_dict.get("session_data"),
+                            summary=session_dict.get("summary"),
+                            metadata=session_dict.get("metadata"),
+                            runs=session_dict.get("runs"),
+                            updated_at=int(time.time()),
+                        ),
+                    ).returning(table)
+                    result = sess.execute(stmt)
+                    row = result.fetchone()
+                    session_dict = dict(row._mapping)
+
+                    if session_dict is None or not deserialize:
+                        return session_dict
+                    return AgentSession.from_dict(session_dict)
+
+            elif isinstance(session, TeamSession):
+                with self.Session() as sess, sess.begin():
+                    stmt = postgresql.insert(table).values(
+                        session_id=session_dict.get("session_id"),
+                        session_type=SessionType.TEAM.value,
+                        team_id=session_dict.get("team_id"),
+                        user_id=session_dict.get("user_id"),
+                        runs=session_dict.get("runs"),
+                        team_data=session_dict.get("team_data"),
+                        session_data=session_dict.get("session_data"),
+                        summary=session_dict.get("summary"),
+                        metadata=session_dict.get("metadata"),
+                        created_at=session_dict.get("created_at"),
+                        updated_at=session_dict.get("created_at"),
+                    )
+                    stmt = stmt.on_conflict_do_update(  # type: ignore
+                        index_elements=["session_id"],
+                        set_=dict(
+                            team_id=session_dict.get("team_id"),
+                            user_id=session_dict.get("user_id"),
+                            team_data=session_dict.get("team_data"),
+                            session_data=session_dict.get("session_data"),
+                            summary=session_dict.get("summary"),
+                            metadata=session_dict.get("metadata"),
+                            runs=session_dict.get("runs"),
+                            updated_at=int(time.time()),
+                        ),
+                    ).returning(table)
+                    result = sess.execute(stmt)
+                    row = result.fetchone()
+                    session_dict = dict(row._mapping)
+
+                    if session_dict is None or not deserialize:
+                        return session_dict
+                    return TeamSession.from_dict(session_dict)
+
+            elif isinstance(session, WorkflowSession):
+                with self.Session() as sess, sess.begin():
+                    stmt = postgresql.insert(table).values(
+                        session_id=session_dict.get("session_id"),
+                        session_type=SessionType.WORKFLOW.value,
+                        workflow_id=session_dict.get("workflow_id"),
+                        user_id=session_dict.get("user_id"),
+                        runs=session_dict.get("runs"),
+                        workflow_data=session_dict.get("workflow_data"),
+                        session_data=session_dict.get("session_data"),
+                        summary=session_dict.get("summary"),
+                        metadata=session_dict.get("metadata"),
+                        created_at=session_dict.get("created_at"),
+                        updated_at=session_dict.get("created_at"),
+                    )
+                    stmt = stmt.on_conflict_do_update(  # type: ignore
+                        index_elements=["session_id"],
+                        set_=dict(
+                            workflow_id=session_dict.get("workflow_id"),
+                            user_id=session_dict.get("user_id"),
+                            workflow_data=session_dict.get("workflow_data"),
+                            session_data=session_dict.get("session_data"),
+                            summary=session_dict.get("summary"),
+                            metadata=session_dict.get("metadata"),
+                            runs=session_dict.get("runs"),
+                            updated_at=int(time.time()),
+                        ),
+                    ).returning(table)
+                    result = sess.execute(stmt)
+                    row = result.fetchone()
+                    session_dict = dict(row._mapping)
+
+                    if session_dict is None or not deserialize:
+                        return session_dict
+                    return WorkflowSession.from_dict(session_dict)
+
+            else:
+                raise ValueError(f"Invalid session type: {session.session_type}")
+
+        except Exception as e:
+            log_error(f"Exception upserting into sessions table: {e}")
+            raise e
+
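Each branch above issues the same INSERT ... ON CONFLICT (session_id) DO UPDATE statement and differs only in the type-specific columns. A compile-only sketch of that statement shape, with an illustrative reduced column set (nothing is executed against a database):

import time

from sqlalchemy import Column, Integer, MetaData, String, Table
from sqlalchemy.dialects import postgresql

metadata = MetaData()
t = Table(
    "sessions",
    metadata,
    Column("session_id", String, primary_key=True),
    Column("user_id", String),
    Column("updated_at", Integer),
)

now = int(time.time())
stmt = postgresql.insert(t).values(session_id="s1", user_id="u1", updated_at=now)
stmt = stmt.on_conflict_do_update(
    index_elements=["session_id"],  # conflict target: the primary key
    set_=dict(user_id="u1", updated_at=now),  # applied only when the row already exists
).returning(*t.c)
print(stmt.compile(dialect=postgresql.dialect()))
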
+    def upsert_sessions(
+        self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
+    ) -> List[Union[Session, Dict[str, Any]]]:
+        """
+        Bulk insert or update multiple sessions.
+
+        Args:
+            sessions (List[Session]): The list of session data to upsert.
+            deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
+            preserve_updated_at (bool): If True, preserve the updated_at from the session object.
+
+        Returns:
+            List[Union[Session, Dict[str, Any]]]: List of upserted sessions
+
+        Raises:
+            Exception: If an error occurs during bulk upsert.
+        """
+        try:
+            if not sessions:
+                return []
+
+            table = self._get_table(table_type="sessions", create_table_if_not_found=True)
+            if table is None:
+                return []
+
+            # Group sessions by type for better handling
+            agent_sessions = [s for s in sessions if isinstance(s, AgentSession)]
+            team_sessions = [s for s in sessions if isinstance(s, TeamSession)]
+            workflow_sessions = [s for s in sessions if isinstance(s, WorkflowSession)]
+
+            results: List[Union[Session, Dict[str, Any]]] = []
+
+            # Bulk upsert agent sessions
+            if agent_sessions:
+                session_records = []
+                for agent_session in agent_sessions:
+                    session_dict = agent_session.to_dict()
+                    # Use preserved updated_at if flag is set (even if None), otherwise use current time
+                    updated_at = session_dict.get("updated_at") if preserve_updated_at else int(time.time())
+                    session_records.append(
+                        {
+                            "session_id": session_dict.get("session_id"),
+                            "session_type": SessionType.AGENT.value,
+                            "agent_id": session_dict.get("agent_id"),
+                            "user_id": session_dict.get("user_id"),
+                            "agent_data": session_dict.get("agent_data"),
+                            "session_data": session_dict.get("session_data"),
+                            "summary": session_dict.get("summary"),
+                            "metadata": session_dict.get("metadata"),
+                            "runs": session_dict.get("runs"),
+                            "created_at": session_dict.get("created_at"),
+                            "updated_at": updated_at,
+                        }
+                    )
+
+                with self.Session() as sess, sess.begin():
+                    stmt: Any = postgresql.insert(table)
+                    update_columns = {
+                        col.name: stmt.excluded[col.name]
+                        for col in table.columns
+                        if col.name not in ["id", "session_id", "created_at"]
+                    }
+                    stmt = stmt.on_conflict_do_update(index_elements=["session_id"], set_=update_columns).returning(
+                        table
+                    )
+
+                    result = sess.execute(stmt, session_records)
+                    for row in result.fetchall():
+                        session_dict = dict(row._mapping)
+                        if deserialize:
+                            deserialized_agent_session = AgentSession.from_dict(session_dict)
+                            if deserialized_agent_session is None:
+                                continue
+                            results.append(deserialized_agent_session)
+                        else:
+                            results.append(session_dict)
+
+            # Bulk upsert team sessions
+            if team_sessions:
+                session_records = []
+                for team_session in team_sessions:
+                    session_dict = team_session.to_dict()
+                    # Use preserved updated_at if flag is set (even if None), otherwise use current time
+                    updated_at = session_dict.get("updated_at") if preserve_updated_at else int(time.time())
+                    session_records.append(
+                        {
+                            "session_id": session_dict.get("session_id"),
+                            "session_type": SessionType.TEAM.value,
+                            "team_id": session_dict.get("team_id"),
+                            "user_id": session_dict.get("user_id"),
+                            "team_data": session_dict.get("team_data"),
+                            "session_data": session_dict.get("session_data"),
+                            "summary": session_dict.get("summary"),
+                            "metadata": session_dict.get("metadata"),
+                            "runs": session_dict.get("runs"),
+                            "created_at": session_dict.get("created_at"),
+                            "updated_at": updated_at,
+                        }
+                    )
+
+                with self.Session() as sess, sess.begin():
+                    stmt = postgresql.insert(table)
+                    update_columns = {
+                        col.name: stmt.excluded[col.name]
+                        for col in table.columns
+                        if col.name not in ["id", "session_id", "created_at"]
+                    }
+                    stmt = stmt.on_conflict_do_update(index_elements=["session_id"], set_=update_columns).returning(
+                        table
+                    )
+
+                    result = sess.execute(stmt, session_records)
+                    for row in result.fetchall():
+                        session_dict = dict(row._mapping)
+                        if deserialize:
+                            deserialized_team_session = TeamSession.from_dict(session_dict)
+                            if deserialized_team_session is None:
+                                continue
+                            results.append(deserialized_team_session)
+                        else:
+                            results.append(session_dict)
+
+            # Bulk upsert workflow sessions
+            if workflow_sessions:
+                session_records = []
+                for workflow_session in workflow_sessions:
+                    session_dict = workflow_session.to_dict()
+                    # Use preserved updated_at if flag is set (even if None), otherwise use current time
+                    updated_at = session_dict.get("updated_at") if preserve_updated_at else int(time.time())
+                    session_records.append(
+                        {
+                            "session_id": session_dict.get("session_id"),
+                            "session_type": SessionType.WORKFLOW.value,
+                            "workflow_id": session_dict.get("workflow_id"),
+                            "user_id": session_dict.get("user_id"),
+                            "workflow_data": session_dict.get("workflow_data"),
+                            "session_data": session_dict.get("session_data"),
+                            "summary": session_dict.get("summary"),
+                            "metadata": session_dict.get("metadata"),
+                            "runs": session_dict.get("runs"),
+                            "created_at": session_dict.get("created_at"),
+                            "updated_at": updated_at,
+                        }
+                    )
+
+                with self.Session() as sess, sess.begin():
+                    stmt = postgresql.insert(table)
+                    update_columns = {
+                        col.name: stmt.excluded[col.name]
+                        for col in table.columns
+                        if col.name not in ["id", "session_id", "created_at"]
+                    }
+                    stmt = stmt.on_conflict_do_update(index_elements=["session_id"], set_=update_columns).returning(
+                        table
+                    )
+
+                    result = sess.execute(stmt, session_records)
+                    for row in result.fetchall():
+                        session_dict = dict(row._mapping)
+                        if deserialize:
+                            deserialized_workflow_session = WorkflowSession.from_dict(session_dict)
+                            if deserialized_workflow_session is None:
+                                continue
+                            results.append(deserialized_workflow_session)
+                        else:
+                            results.append(session_dict)
+
+            return results
+
+        except Exception as e:
+            log_error(f"Exception bulk upserting sessions: {e}")
+            return []
+
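The bulk variant replaces the hand-written set_ dict with EXCLUDED references, so on conflict every column except the keys is taken from the incoming row, and a single call with a list of parameter dicts upserts the whole batch. A compile-only sketch with an illustrative table:

from sqlalchemy import Column, Integer, MetaData, String, Table
from sqlalchemy.dialects import postgresql

metadata = MetaData()
t = Table(
    "sessions",
    metadata,
    Column("session_id", String, primary_key=True),
    Column("user_id", String),
    Column("created_at", Integer),
    Column("updated_at", Integer),
)

stmt = postgresql.insert(t)
update_columns = {
    col.name: stmt.excluded[col.name]  # EXCLUDED.<col>: the value from the rejected insert
    for col in t.columns
    if col.name not in ["session_id", "created_at"]  # keys and creation time are kept
}
stmt = stmt.on_conflict_do_update(index_elements=["session_id"], set_=update_columns)
print(stmt.compile(dialect=postgresql.dialect()))
# Executing with a list of parameter dicts (sess.execute(stmt, records))
# upserts the whole batch in one statement.
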
+    # -- Memory methods --
+    def delete_user_memory(self, memory_id: str, user_id: Optional[str] = None):
+        """Delete a user memory from the database.
+
+        Args:
+            memory_id (str): The ID of the memory to delete.
+            user_id (Optional[str]): The ID of the user to filter by. Defaults to None.
+
+        Raises:
+            Exception: If an error occurs during deletion.
+        """
+        try:
+            table = self._get_table(table_type="memories")
+            if table is None:
+                return
+
+            with self.Session() as sess, sess.begin():
+                delete_stmt = table.delete().where(table.c.memory_id == memory_id)
+
+                if user_id is not None:
+                    delete_stmt = delete_stmt.where(table.c.user_id == user_id)
+
+                result = sess.execute(delete_stmt)
+
+                success = result.rowcount > 0
+                if success:
+                    log_debug(f"Successfully deleted user memory id: {memory_id}")
+                else:
+                    log_debug(f"No user memory found with id: {memory_id}")
+
+        except Exception as e:
+            log_error(f"Error deleting user memory: {e}")
+            raise e
+
+    def delete_user_memories(self, memory_ids: List[str], user_id: Optional[str] = None) -> None:
+        """Delete user memories from the database.
+
+        Args:
+            memory_ids (List[str]): The IDs of the memories to delete.
+            user_id (Optional[str]): The ID of the user to filter by. Defaults to None.
+
+        Raises:
+            Exception: If an error occurs during deletion.
+        """
+        try:
+            table = self._get_table(table_type="memories")
+            if table is None:
+                return
+
+            with self.Session() as sess, sess.begin():
+                delete_stmt = table.delete().where(table.c.memory_id.in_(memory_ids))
+
+                if user_id is not None:
+                    delete_stmt = delete_stmt.where(table.c.user_id == user_id)
+
+                result = sess.execute(delete_stmt)
+
+                if result.rowcount == 0:
+                    log_debug(f"No user memories found with ids: {memory_ids}")
+                else:
+                    log_debug(f"Successfully deleted {result.rowcount} user memories")
+
+        except Exception as e:
+            log_error(f"Error deleting user memories: {e}")
+            raise e
+
+    def get_all_memory_topics(self) -> List[str]:
+        """Get all memory topics from the database.
+
+        Returns:
+            List[str]: List of memory topics.
+        """
+        try:
+            table = self._get_table(table_type="memories")
+            if table is None:
+                return []
+
+            with self.Session() as sess, sess.begin():
+                try:
+                    stmt = select(func.jsonb_array_elements_text(table.c.topics))
+                    result = sess.execute(stmt).fetchall()
+                except ProgrammingError:
+                    # Retrying with json_array_elements_text. This works in older versions,
+                    # where the topics column was of type JSON instead of JSONB
+                    stmt = select(func.json_array_elements_text(table.c.topics))
+                    result = sess.execute(stmt).fetchall()
+
+                return list(set([record[0] for record in result]))
+
+        except Exception as e:
+            log_error(f"Exception reading from memory table: {e}")
+            return []
+
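Note: jsonb_array_elements_text() unnests a JSONB array into one row per element; on legacy schemas where topics is plain JSON it raises ProgrammingError, hence the json_array_elements_text() fallback above. A compile-only sketch with an illustrative table:

from sqlalchemy import Column, MetaData, String, Table, func, select
from sqlalchemy.dialects import postgresql

metadata = MetaData()
t = Table("memories", metadata, Column("memory_id", String), Column("topics", postgresql.JSONB))

stmt = select(func.jsonb_array_elements_text(t.c.topics))
print(stmt.compile(dialect=postgresql.dialect()))
# SELECT jsonb_array_elements_text(memories.topics) ... FROM memories
# One output row per topic; duplicates are then collapsed in Python via set().
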
+    def get_user_memory(
+        self, memory_id: str, deserialize: Optional[bool] = True, user_id: Optional[str] = None
+    ) -> Optional[Union[UserMemory, Dict[str, Any]]]:
+        """Get a memory from the database.
+
+        Args:
+            memory_id (str): The ID of the memory to get.
+            deserialize (Optional[bool]): Whether to deserialize the memory. Defaults to True.
+            user_id (Optional[str]): The ID of the user to filter by. Defaults to None.
+
+        Returns:
+            Union[UserMemory, Dict[str, Any], None]:
+                - When deserialize=True: UserMemory object
+                - When deserialize=False: UserMemory dictionary
+
+        Raises:
+            Exception: If an error occurs during retrieval.
+        """
+        try:
+            table = self._get_table(table_type="memories")
+            if table is None:
+                return None
+
+            with self.Session() as sess, sess.begin():
+                stmt = select(table).where(table.c.memory_id == memory_id)
+
+                if user_id is not None:
+                    stmt = stmt.where(table.c.user_id == user_id)
+
+                result = sess.execute(stmt).fetchone()
+                if not result:
+                    return None
+
+                memory_raw = dict(result._mapping)
+                if not deserialize:
+                    return memory_raw
+
+                return UserMemory.from_dict(memory_raw)
+
+        except Exception as e:
+            log_error(f"Exception reading from memory table: {e}")
+            raise e
+
+    def get_user_memories(
+        self,
+        user_id: Optional[str] = None,
+        agent_id: Optional[str] = None,
+        team_id: Optional[str] = None,
+        topics: Optional[List[str]] = None,
+        search_content: Optional[str] = None,
+        limit: Optional[int] = None,
+        page: Optional[int] = None,
+        sort_by: Optional[str] = None,
+        sort_order: Optional[str] = None,
+        deserialize: Optional[bool] = True,
+    ) -> Union[List[UserMemory], Tuple[List[Dict[str, Any]], int]]:
+        """Get all memories from the database as UserMemory objects.
+
+        Args:
+            user_id (Optional[str]): The ID of the user to filter by.
+            agent_id (Optional[str]): The ID of the agent to filter by.
+            team_id (Optional[str]): The ID of the team to filter by.
+            topics (Optional[List[str]]): The topics to filter by.
+            search_content (Optional[str]): The content to search for.
+            limit (Optional[int]): The maximum number of memories to return.
+            page (Optional[int]): The page number.
+            sort_by (Optional[str]): The column to sort by.
+            sort_order (Optional[str]): The order to sort by.
+            deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.
+
+        Returns:
+            Union[List[UserMemory], Tuple[List[Dict[str, Any]], int]]:
+                - When deserialize=True: List of UserMemory objects
+                - When deserialize=False: Tuple of (memory dictionaries, total count)
+
+        Raises:
+            Exception: If an error occurs during retrieval.
+        """
+        try:
+            table = self._get_table(table_type="memories")
+            if table is None:
+                return [] if deserialize else ([], 0)
+
+            with self.Session() as sess, sess.begin():
+                stmt = select(table)
+                # Filtering
+                if user_id is not None:
+                    stmt = stmt.where(table.c.user_id == user_id)
+                if agent_id is not None:
+                    stmt = stmt.where(table.c.agent_id == agent_id)
+                if team_id is not None:
+                    stmt = stmt.where(table.c.team_id == team_id)
+                if topics is not None:
+                    for topic in topics:
+                        stmt = stmt.where(func.cast(table.c.topics, String).like(f'%"{topic}"%'))
+                if search_content is not None:
+                    stmt = stmt.where(func.cast(table.c.memory, postgresql.TEXT).ilike(f"%{search_content}%"))
+
+                # Get total count after applying filtering
+                count_stmt = select(func.count()).select_from(stmt.alias())
+                total_count = sess.execute(count_stmt).scalar()
+
+                # Sorting
+                stmt = apply_sorting(stmt, table, sort_by, sort_order)
+
+                # Paginating
+                if limit is not None:
+                    stmt = stmt.limit(limit)
+                    if page is not None:
+                        stmt = stmt.offset((page - 1) * limit)
+
+                result = sess.execute(stmt).fetchall()
+                if not result:
+                    return [] if deserialize else ([], 0)
+
+                memories_raw = [record._mapping for record in result]
+                if not deserialize:
+                    return memories_raw, total_count
+
+                return [UserMemory.from_dict(record) for record in memories_raw]
+
+        except Exception as e:
+            log_error(f"Exception reading from memory table: {e}")
+            raise e
+
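The topic filter above relies on JSON serialization: casting topics to text turns a stored list such as ["food", "travel"] into a string in which each element keeps its double quotes, so LIKE '%"food"%' matches rows tagged with that exact topic. A compile-only sketch, using SQLAlchemy's cast() construct in place of func.cast() (same rendered SQL):

from sqlalchemy import Column, MetaData, String, Table, cast, select
from sqlalchemy.dialects import postgresql

metadata = MetaData()
t = Table("memories", metadata, Column("memory_id", String), Column("topics", postgresql.JSON))

topic = "food"
# The quotes around {topic} are what anchor the match to a whole JSON string element
stmt = select(t).where(cast(t.c.topics, String).like(f'%"{topic}"%'))
print(stmt.compile(dialect=postgresql.dialect()))
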
+    def clear_memories(self) -> None:
+        """Delete all memories from the database.
+
+        Raises:
+            Exception: If an error occurs during deletion.
+        """
+        try:
+            table = self._get_table(table_type="memories")
+            if table is None:
+                return
+
+            with self.Session() as sess, sess.begin():
+                sess.execute(table.delete())
+
+        except Exception as e:
+            log_error(f"Exception deleting all memories: {e}")
+            raise e
+
+    def get_user_memory_stats(
+        self, limit: Optional[int] = None, page: Optional[int] = None, user_id: Optional[str] = None
+    ) -> Tuple[List[Dict[str, Any]], int]:
+        """Get user memories stats.
+
+        Args:
+            limit (Optional[int]): The maximum number of user stats to return.
+            page (Optional[int]): The page number.
+            user_id (Optional[str]): User ID for filtering.
+
+        Returns:
+            Tuple[List[Dict[str, Any]], int]: A list of dictionaries containing user stats and total count.
+
+        Example:
+            (
+                [
+                    {
+                        "user_id": "123",
+                        "total_memories": 10,
+                        "last_memory_updated_at": 1714560000,
+                    },
+                ],
+                total_count: 1,
+            )
+        """
+        try:
+            table = self._get_table(table_type="memories")
+            if table is None:
+                return [], 0
+
+            with self.Session() as sess, sess.begin():
+                stmt = select(
+                    table.c.user_id,
+                    func.count(table.c.memory_id).label("total_memories"),
+                    func.max(table.c.updated_at).label("last_memory_updated_at"),
+                )
+                if user_id is not None:
+                    stmt = stmt.where(table.c.user_id == user_id)
+                else:
+                    stmt = stmt.where(table.c.user_id.is_not(None))
+                stmt = stmt.group_by(table.c.user_id)
+                stmt = stmt.order_by(func.max(table.c.updated_at).desc())
+
+                count_stmt = select(func.count()).select_from(stmt.alias())
+                total_count = sess.execute(count_stmt).scalar()
+
+                # Pagination
+                if limit is not None:
+                    stmt = stmt.limit(limit)
+                    if page is not None:
+                        stmt = stmt.offset((page - 1) * limit)
+
+                result = sess.execute(stmt).fetchall()
+                if not result:
+                    return [], 0
+
+                return [
+                    {
+                        "user_id": record.user_id,  # type: ignore
+                        "total_memories": record.total_memories,
+                        "last_memory_updated_at": record.last_memory_updated_at,
+                    }
+                    for record in result
+                ], total_count
+
+        except Exception as e:
+            log_error(f"Exception getting user memory stats: {e}")
+            raise e
+
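The stats query is a single filtered GROUP BY over the memories table. A compile-only sketch of the same aggregation with an illustrative column set:

from sqlalchemy import Column, Integer, MetaData, String, Table, func, select
from sqlalchemy.dialects import postgresql

metadata = MetaData()
t = Table(
    "memories",
    metadata,
    Column("memory_id", String, primary_key=True),
    Column("user_id", String),
    Column("updated_at", Integer),
)

stmt = (
    select(
        t.c.user_id,
        func.count(t.c.memory_id).label("total_memories"),
        func.max(t.c.updated_at).label("last_memory_updated_at"),
    )
    .where(t.c.user_id.is_not(None))  # skip rows without an owner
    .group_by(t.c.user_id)
    .order_by(func.max(t.c.updated_at).desc())  # most recently active users first
)
print(stmt.compile(dialect=postgresql.dialect()))
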
+    def upsert_user_memory(
+        self, memory: UserMemory, deserialize: Optional[bool] = True
+    ) -> Optional[Union[UserMemory, Dict[str, Any]]]:
+        """Upsert a user memory in the database.
+
+        Args:
+            memory (UserMemory): The user memory to upsert.
+            deserialize (Optional[bool]): Whether to deserialize the memory. Defaults to True.
+
+        Returns:
+            Optional[Union[UserMemory, Dict[str, Any]]]:
+                - When deserialize=True: UserMemory object
+                - When deserialize=False: UserMemory dictionary
+
+        Raises:
+            Exception: If an error occurs during upsert.
+        """
+        try:
+            table = self._get_table(table_type="memories", create_table_if_not_found=True)
+            if table is None:
+                return None
+
+            with self.Session() as sess, sess.begin():
+                if memory.memory_id is None:
+                    memory.memory_id = str(uuid4())
+
+                current_time = int(time.time())
+
+                stmt = postgresql.insert(table).values(
+                    memory_id=memory.memory_id,
+                    memory=memory.memory,
+                    input=memory.input,
+                    user_id=memory.user_id,
+                    agent_id=memory.agent_id,
+                    team_id=memory.team_id,
+                    topics=memory.topics,
+                    feedback=memory.feedback,
+                    created_at=memory.created_at,
+                    updated_at=memory.created_at,
+                )
+                stmt = stmt.on_conflict_do_update(  # type: ignore
+                    index_elements=["memory_id"],
+                    set_=dict(
+                        memory=memory.memory,
+                        topics=memory.topics,
+                        input=memory.input,
+                        agent_id=memory.agent_id,
+                        team_id=memory.team_id,
+                        feedback=memory.feedback,
+                        updated_at=current_time,
+                        # Preserve created_at on update - don't overwrite existing value
+                        created_at=table.c.created_at,
+                    ),
+                ).returning(table)
+
+                result = sess.execute(stmt)
+                row = result.fetchone()
+
+                memory_raw = dict(row._mapping)
+
+                if not memory_raw or not deserialize:
+                    return memory_raw
+
+                return UserMemory.from_dict(memory_raw)
+
+        except Exception as e:
+            log_error(f"Exception upserting user memory: {e}")
+            raise e
+
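Note how the conflict branch above sets created_at=table.c.created_at: in a DO UPDATE SET clause a plain column reference reads from the existing row, so the original creation time survives repeated upserts. A compile-only sketch with an illustrative table:

import time

from sqlalchemy import Column, Integer, MetaData, String, Table
from sqlalchemy.dialects import postgresql

metadata = MetaData()
t = Table(
    "memories",
    metadata,
    Column("memory_id", String, primary_key=True),
    Column("memory", String),
    Column("created_at", Integer),
    Column("updated_at", Integer),
)

now = int(time.time())
stmt = postgresql.insert(t).values(memory_id="m1", memory="likes tea", created_at=now, updated_at=now)
stmt = stmt.on_conflict_do_update(
    index_elements=["memory_id"],
    # created_at=t.c.created_at re-reads the existing row, keeping the original value
    set_=dict(memory="likes tea", updated_at=now, created_at=t.c.created_at),
)
print(stmt.compile(dialect=postgresql.dialect()))
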
+    def upsert_memories(
+        self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
+    ) -> List[Union[UserMemory, Dict[str, Any]]]:
+        """
+        Bulk insert or update multiple memories in the database for improved performance.
+
+        Args:
+            memories (List[UserMemory]): The list of memories to upsert.
+            deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.
+            preserve_updated_at (bool): If True, preserve the updated_at from the memory object.
+                If False (default), set updated_at to current time.
+
+        Returns:
+            List[Union[UserMemory, Dict[str, Any]]]: List of upserted memories
+
+        Raises:
+            Exception: If an error occurs during bulk upsert.
+        """
+        try:
+            if not memories:
+                return []
+
+            table = self._get_table(table_type="memories", create_table_if_not_found=True)
+            if table is None:
+                return []
+
+            # Prepare memory records for bulk insert
+            memory_records = []
+            current_time = int(time.time())
+
+            for memory in memories:
+                if memory.memory_id is None:
+                    memory.memory_id = str(uuid4())
+
+                # Use preserved updated_at if flag is set (even if None), otherwise use current time
+                updated_at = memory.updated_at if preserve_updated_at else current_time
+
+                memory_records.append(
+                    {
+                        "memory_id": memory.memory_id,
+                        "memory": memory.memory,
+                        "input": memory.input,
+                        "user_id": memory.user_id,
+                        "agent_id": memory.agent_id,
+                        "team_id": memory.team_id,
+                        "topics": memory.topics,
+                        "feedback": memory.feedback,
+                        "created_at": memory.created_at,
+                        "updated_at": updated_at,
+                    }
+                )
+
+            results: List[Union[UserMemory, Dict[str, Any]]] = []
+
+            with self.Session() as sess, sess.begin():
+                insert_stmt = postgresql.insert(table)
+                update_columns = {
+                    col.name: insert_stmt.excluded[col.name]
+                    for col in table.columns
+                    if col.name not in ["memory_id", "created_at"]  # Don't update primary key or created_at
+                }
+                stmt = insert_stmt.on_conflict_do_update(index_elements=["memory_id"], set_=update_columns).returning(
+                    table
+                )
+
+                result = sess.execute(stmt, memory_records)
+                for row in result.fetchall():
+                    memory_dict = dict(row._mapping)
+                    if deserialize:
+                        deserialized_memory = UserMemory.from_dict(memory_dict)
+                        if deserialized_memory is None:
+                            continue
+                        results.append(deserialized_memory)
+                    else:
+                        results.append(memory_dict)
+
+            return results
+
+        except Exception as e:
+            log_error(f"Exception bulk upserting memories: {e}")
+            return []
+
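A hedged usage sketch of the bulk helper. The import paths, the PostgresDb constructor, its db_url argument, and the UserMemory keyword fields below are assumptions inferred from this diff's file layout and from the record dicts built above, not verified against the released API:

# Assumed imports and constructor; adjust to the package's actual API.
from agno.db.postgres import PostgresDb
from agno.db.schemas.memory import UserMemory

db = PostgresDb(db_url="postgresql+psycopg://ai:ai@localhost:5432/ai")  # illustrative DSN
memories = [
    UserMemory(memory="Prefers vegetarian food", topics=["food"], user_id="u1"),
    UserMemory(memory="Based in Lisbon", topics=["location"], user_id="u1"),
]
# deserialize=False returns the raw row dicts instead of UserMemory objects
rows = db.upsert_memories(memories, deserialize=False)
print([row["memory_id"] for row in rows])  # ids are generated when missing
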
+    # -- Metrics methods --
+    def _get_all_sessions_for_metrics_calculation(
+        self, start_timestamp: Optional[int] = None, end_timestamp: Optional[int] = None
+    ) -> List[Dict[str, Any]]:
+        """
+        Get all sessions of all types (agent, team, workflow) as raw dictionaries.
+
+        Args:
+            start_timestamp (Optional[int]): The start timestamp to filter by. Defaults to None.
+            end_timestamp (Optional[int]): The end timestamp to filter by. Defaults to None.
+
+        Returns:
+            List[Dict[str, Any]]: List of session dictionaries with session_type field.
+
+        Raises:
+            Exception: If an error occurs during retrieval.
+        """
+        try:
+            table = self._get_table(table_type="sessions")
+            if table is None:
+                return []
+
+            stmt = select(
+                table.c.user_id,
+                table.c.session_data,
+                table.c.runs,
+                table.c.created_at,
+                table.c.session_type,
+            )
+
+            if start_timestamp is not None:
+                stmt = stmt.where(table.c.created_at >= start_timestamp)
+            if end_timestamp is not None:
+                stmt = stmt.where(table.c.created_at <= end_timestamp)
+
+            with self.Session() as sess:
+                result = sess.execute(stmt).fetchall()
+
+            return [record._mapping for record in result]
+
+        except Exception as e:
+            log_error(f"Exception reading from sessions table: {e}")
+            raise e
+
+    def _get_metrics_calculation_starting_date(self, table: Table) -> Optional[date]:
+        """Get the first date for which metrics calculation is needed:
+
+        1. If there are metrics records, return the date of the first day without a complete metrics record.
+        2. If there are no metrics records, return the date of the first recorded session.
+        3. If there are no metrics records and no sessions records, return None.
+
+        Args:
+            table (Table): The table to get the starting date for.
+
+        Returns:
+            Optional[date]: The starting date for which metrics calculation is needed.
+        """
+        with self.Session() as sess:
+            stmt = select(table).order_by(table.c.date.desc()).limit(1)
+            result = sess.execute(stmt).fetchone()
+
+            # 1. Return the date of the first day without a complete metrics record.
+            if result is not None:
+                if result.completed:
+                    return result._mapping["date"] + timedelta(days=1)
+                else:
+                    return result._mapping["date"]
+
+            # 2. No metrics records. Return the date of the first recorded session.
+            first_session, _ = self.get_sessions(sort_by="created_at", sort_order="asc", limit=1, deserialize=False)
+
+            first_session_date = first_session[0]["created_at"] if first_session else None  # type: ignore[index]
+
+            # 3. No metrics records and no sessions records. Return None.
+            if first_session_date is None:
+                return None
+
+            return datetime.fromtimestamp(first_session_date, tz=timezone.utc).date()
+
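A standalone sketch of the date resolution above: the epoch timestamp stored in created_at becomes a UTC calendar date, and the timedelta step moves the start forward one day when the newest metrics row is already complete. The timestamp value is illustrative:

from datetime import datetime, timedelta, timezone

first_session_created_at = 1714560000  # epoch seconds, as stored in created_at
starting = datetime.fromtimestamp(first_session_created_at, tz=timezone.utc).date()
print(starting)  # 2024-05-01

# A complete newest metrics row pushes the starting date one day forward:
print(starting + timedelta(days=1))  # 2024-05-02
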
+    def calculate_metrics(self) -> Optional[list[dict]]:
+        """Calculate metrics for all dates without complete metrics.
+
+        Returns:
+            Optional[list[dict]]: The calculated metrics.
+
+        Raises:
+            Exception: If an error occurs during metrics calculation.
+        """
+        try:
+            table = self._get_table(table_type="metrics", create_table_if_not_found=True)
+            if table is None:
+                return None
+
+            starting_date = self._get_metrics_calculation_starting_date(table)
+
+            if starting_date is None:
+                log_info("No session data found. Won't calculate metrics.")
+                return None
+
+            dates_to_process = get_dates_to_calculate_metrics_for(starting_date)
+            if not dates_to_process:
+                log_info("Metrics already calculated for all relevant dates.")
+                return None
+
+            start_timestamp = int(
+                datetime.combine(dates_to_process[0], datetime.min.time()).replace(tzinfo=timezone.utc).timestamp()
+            )
+            end_timestamp = int(
+                datetime.combine(dates_to_process[-1] + timedelta(days=1), datetime.min.time())
+                .replace(tzinfo=timezone.utc)
+                .timestamp()
+            )
+
+            sessions = self._get_all_sessions_for_metrics_calculation(
+                start_timestamp=start_timestamp, end_timestamp=end_timestamp
+            )
+
+            all_sessions_data = fetch_all_sessions_data(
+                sessions=sessions, dates_to_process=dates_to_process, start_timestamp=start_timestamp
+            )
+            if not all_sessions_data:
+                log_info("No new session data found. Won't calculate metrics.")
+                return None
+
+            results = []
+            metrics_records = []
+
+            for date_to_process in dates_to_process:
+                date_key = date_to_process.isoformat()
+                sessions_for_date = all_sessions_data.get(date_key, {})
+
+                # Skip dates with no sessions
+                if not any(len(sessions) > 0 for sessions in sessions_for_date.values()):
+                    continue
+
+                metrics_record = calculate_date_metrics(date_to_process, sessions_for_date)
+
+                metrics_records.append(metrics_record)
+
+            if metrics_records:
+                with self.Session() as sess, sess.begin():
+                    results = bulk_upsert_metrics(session=sess, table=table, metrics_records=metrics_records)
+
+            log_debug("Updated metrics calculations")
+
+            return results
+
+        except Exception as e:
+            log_error(f"Exception refreshing metrics: {e}")
+            raise e
+
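A standalone sketch of the window arithmetic above: the timestamps span midnight UTC at the start of the first date through midnight UTC after the last date, so every session created on the processed days falls inside the window. The dates are illustrative:

from datetime import date, datetime, timedelta, timezone

dates_to_process = [date(2024, 5, 1), date(2024, 5, 2)]
start_timestamp = int(
    datetime.combine(dates_to_process[0], datetime.min.time()).replace(tzinfo=timezone.utc).timestamp()
)
end_timestamp = int(
    datetime.combine(dates_to_process[-1] + timedelta(days=1), datetime.min.time())
    .replace(tzinfo=timezone.utc)
    .timestamp()
)
print(start_timestamp, end_timestamp)  # 1714521600 1714694400: both days, in UTC
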
+    def get_metrics(
+        self,
+        starting_date: Optional[date] = None,
+        ending_date: Optional[date] = None,
+    ) -> Tuple[List[dict], Optional[int]]:
+        """Get all metrics matching the given date range.
+
+        Args:
+            starting_date (Optional[date]): The starting date to filter metrics by.
+            ending_date (Optional[date]): The ending date to filter metrics by.
+
+        Returns:
+            Tuple[List[dict], Optional[int]]: A tuple containing the metrics and the timestamp of the latest update.
+
+        Raises:
+            Exception: If an error occurs during retrieval.
+        """
+        try:
+            table = self._get_table(table_type="metrics", create_table_if_not_found=True)
+            if table is None:
+                return [], None
+
+            with self.Session() as sess, sess.begin():
+                stmt = select(table)
+                if starting_date:
+                    stmt = stmt.where(table.c.date >= starting_date)
+                if ending_date:
+                    stmt = stmt.where(table.c.date <= ending_date)
+                result = sess.execute(stmt).fetchall()
+                if not result:
+                    return [], None
+
+                # Get the latest updated_at
+                latest_stmt = select(func.max(table.c.updated_at))
+                latest_updated_at = sess.execute(latest_stmt).scalar()
+
+                return [row._mapping for row in result], latest_updated_at
+
+        except Exception as e:
+            log_error(f"Exception getting metrics: {e}")
+            raise e
+
+    # -- Knowledge methods --
+    def delete_knowledge_content(self, id: str):
+        """Delete a knowledge row from the database.
+
+        Args:
+            id (str): The ID of the knowledge row to delete.
+        """
+        try:
+            table = self._get_table(table_type="knowledge")
+            if table is None:
+                return
+
+            with self.Session() as sess, sess.begin():
+                stmt = table.delete().where(table.c.id == id)
+                sess.execute(stmt)
+
+        except Exception as e:
+            log_error(f"Exception deleting knowledge content: {e}")
+            raise e
+
+    def get_knowledge_content(self, id: str) -> Optional[KnowledgeRow]:
+        """Get a knowledge row from the database.
+
+        Args:
+            id (str): The ID of the knowledge row to get.
+
+        Returns:
+            Optional[KnowledgeRow]: The knowledge row, or None if it doesn't exist.
+        """
+        try:
+            table = self._get_table(table_type="knowledge")
+            if table is None:
+                return None
+
+            with self.Session() as sess, sess.begin():
+                stmt = select(table).where(table.c.id == id)
+                result = sess.execute(stmt).fetchone()
+                if result is None:
+                    return None
+
+                return KnowledgeRow.model_validate(result._mapping)
+
+        except Exception as e:
+            log_error(f"Exception getting knowledge content: {e}")
+            raise e
+
+    def get_knowledge_contents(
+        self,
+        limit: Optional[int] = None,
+        page: Optional[int] = None,
+        sort_by: Optional[str] = None,
+        sort_order: Optional[str] = None,
+    ) -> Tuple[List[KnowledgeRow], int]:
+        """Get all knowledge contents from the database.
+
+        Args:
+            limit (Optional[int]): The maximum number of knowledge contents to return.
+            page (Optional[int]): The page number.
+            sort_by (Optional[str]): The column to sort by.
+            sort_order (Optional[str]): The order to sort by.
+
+        Returns:
+            Tuple[List[KnowledgeRow], int]: The knowledge contents and the total count.
+
+        Raises:
+            Exception: If an error occurs during retrieval.
+        """
+        try:
+            table = self._get_table(table_type="knowledge")
+            if table is None:
+                return [], 0
+
+            with self.Session() as sess, sess.begin():
+                stmt = select(table)
+
+                # Apply sorting
+                if sort_by is not None:
+                    stmt = stmt.order_by(getattr(table.c, sort_by) * (1 if sort_order == "asc" else -1))
+
+                # Get total count before applying limit and pagination
+                count_stmt = select(func.count()).select_from(stmt.alias())
+                total_count = sess.execute(count_stmt).scalar()
+
+                # Apply pagination after count
+                if limit is not None:
+                    stmt = stmt.limit(limit)
+                    if page is not None:
+                        stmt = stmt.offset((page - 1) * limit)
+
+                result = sess.execute(stmt).fetchall()
+                return [KnowledgeRow.model_validate(record._mapping) for record in result], total_count
+
+        except Exception as e:
+            log_error(f"Exception getting knowledge contents: {e}")
+            raise e
+
+    def upsert_knowledge_content(self, knowledge_row: KnowledgeRow):
+        """Upsert knowledge content in the database.
+
+        Args:
+            knowledge_row (KnowledgeRow): The knowledge row to upsert.
+
+        Returns:
+            Optional[KnowledgeRow]: The upserted knowledge row, or None if the operation fails.
+        """
+        try:
+            table = self._get_table(table_type="knowledge", create_table_if_not_found=True)
+            if table is None:
+                return None
+
+            with self.Session() as sess, sess.begin():
+                # Get the actual table columns to avoid "unconsumed column names" error
+                table_columns = set(table.columns.keys())
+
+                # Only include fields that exist in the table and are not None
+                insert_data = {}
+                update_fields = {}
+
+                # Map of KnowledgeRow fields to table columns
+                field_mapping = {
+                    "id": "id",
+                    "name": "name",
+                    "description": "description",
+                    "metadata": "metadata",
+                    "type": "type",
+                    "size": "size",
+                    "linked_to": "linked_to",
+                    "access_count": "access_count",
+                    "status": "status",
+                    "status_message": "status_message",
+                    "created_at": "created_at",
+                    "updated_at": "updated_at",
+                    "external_id": "external_id",
+                }
+
+                # Build insert and update data only for fields that exist in the table
+                for model_field, table_column in field_mapping.items():
+                    if table_column in table_columns:
+                        value = getattr(knowledge_row, model_field, None)
+                        if value is not None:
+                            insert_data[table_column] = value
+                            # Don't include ID in update_fields since it's the primary key
+                            if table_column != "id":
+                                update_fields[table_column] = value
+
+                # Ensure id is always included for the insert
+                if "id" in table_columns and knowledge_row.id:
+                    insert_data["id"] = knowledge_row.id
+
+                # Handle case where update_fields is empty (all fields are None or don't exist in table)
+                if not update_fields:
+                    # If we have insert_data, just do an insert without conflict resolution
+                    if insert_data:
+                        stmt = postgresql.insert(table).values(insert_data)
+                        sess.execute(stmt)
+                    else:
+                        # If we have no data at all, this is an error
+                        log_error("No valid fields found for knowledge row upsert")
+                        return None
+                else:
+                    # Normal upsert with conflict resolution
+                    stmt = (
+                        postgresql.insert(table)
+                        .values(insert_data)
+                        .on_conflict_do_update(index_elements=["id"], set_=update_fields)
+                    )
+                    sess.execute(stmt)
+
+            return knowledge_row
+
+        except Exception as e:
+            log_error(f"Error upserting knowledge row: {e}")
+            raise e
+
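A standalone sketch of the field-filtering step above: only keys that exist as table columns and carry non-None values survive into the INSERT and UPDATE maps, and the primary key is never rewritten. Column and field names here are illustrative:

table_columns = {"id", "name", "description", "status"}  # illustrative schema
incoming = {"id": "k1", "name": "handbook.pdf", "description": None, "size": 1024}

insert_data, update_fields = {}, {}
for field, value in incoming.items():
    if field in table_columns and value is not None:  # unknown columns and None values drop out
        insert_data[field] = value
        if field != "id":  # never rewrite the primary key on conflict
            update_fields[field] = value

print(insert_data)   # {'id': 'k1', 'name': 'handbook.pdf'}
print(update_fields) # {'name': 'handbook.pdf'}
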
+    # -- Eval methods --
+    def create_eval_run(self, eval_run: EvalRunRecord) -> Optional[EvalRunRecord]:
+        """Create an EvalRunRecord in the database.
+
+        Args:
+            eval_run (EvalRunRecord): The eval run to create.
+
+        Returns:
+            Optional[EvalRunRecord]: The created eval run, or None if the operation fails.
+
+        Raises:
+            Exception: If an error occurs during creation.
+        """
+        try:
+            table = self._get_table(table_type="evals", create_table_if_not_found=True)
+            if table is None:
+                return None
+
+            with self.Session() as sess, sess.begin():
+                current_time = int(time.time())
+                stmt = postgresql.insert(table).values(
+                    {"created_at": current_time, "updated_at": current_time, **eval_run.model_dump()}
+                )
+                sess.execute(stmt)
+
+            log_debug(f"Created eval run with id '{eval_run.run_id}'")
+
+            return eval_run
+
+        except Exception as e:
+            log_error(f"Error creating eval run: {e}")
+            raise e
+
+    def delete_eval_run(self, eval_run_id: str) -> None:
+        """Delete an eval run from the database.
+
+        Args:
+            eval_run_id (str): The ID of the eval run to delete.
+        """
+        try:
+            table = self._get_table(table_type="evals")
+            if table is None:
+                return
+
+            with self.Session() as sess, sess.begin():
+                stmt = table.delete().where(table.c.run_id == eval_run_id)
+                result = sess.execute(stmt)
+
+                if result.rowcount == 0:
+                    log_warning(f"No eval run found with ID: {eval_run_id}")
+                else:
+                    log_debug(f"Deleted eval run with ID: {eval_run_id}")
+
+        except Exception as e:
+            log_error(f"Error deleting eval run {eval_run_id}: {e}")
+            raise e
+
+    def delete_eval_runs(self, eval_run_ids: List[str]) -> None:
+        """Delete multiple eval runs from the database.
+
+        Args:
+            eval_run_ids (List[str]): List of eval run IDs to delete.
+        """
+        try:
+            table = self._get_table(table_type="evals")
+            if table is None:
+                return
+
+            with self.Session() as sess, sess.begin():
+                stmt = table.delete().where(table.c.run_id.in_(eval_run_ids))
+                result = sess.execute(stmt)
+
+                if result.rowcount == 0:
+                    log_warning(f"No eval runs found with IDs: {eval_run_ids}")
+                else:
+                    log_debug(f"Deleted {result.rowcount} eval runs")
+
+        except Exception as e:
+            log_error(f"Error deleting eval runs {eval_run_ids}: {e}")
+            raise e
+
+    def get_eval_run(
+        self, eval_run_id: str, deserialize: Optional[bool] = True
+    ) -> Optional[Union[EvalRunRecord, Dict[str, Any]]]:
+        """Get an eval run from the database.
+
+        Args:
+            eval_run_id (str): The ID of the eval run to get.
+            deserialize (Optional[bool]): Whether to deserialize the eval run. Defaults to True.
+
+        Returns:
+            Optional[Union[EvalRunRecord, Dict[str, Any]]]:
+                - When deserialize=True: EvalRunRecord object
+                - When deserialize=False: EvalRun dictionary
+
+        Raises:
+            Exception: If an error occurs during retrieval.
+        """
+        try:
+            table = self._get_table(table_type="evals")
+            if table is None:
+                return None
+
+            with self.Session() as sess, sess.begin():
+                stmt = select(table).where(table.c.run_id == eval_run_id)
+                result = sess.execute(stmt).fetchone()
+                if result is None:
+                    return None
+
+                eval_run_raw = dict(result._mapping)
+                if not deserialize:
+                    return eval_run_raw
+
+                return EvalRunRecord.model_validate(eval_run_raw)
+
+        except Exception as e:
+            log_error(f"Exception getting eval run {eval_run_id}: {e}")
+            raise e
+
+    def get_eval_runs(
+        self,
+        limit: Optional[int] = None,
+        page: Optional[int] = None,
+        sort_by: Optional[str] = None,
+        sort_order: Optional[str] = None,
+        agent_id: Optional[str] = None,
+        team_id: Optional[str] = None,
+        workflow_id: Optional[str] = None,
+        model_id: Optional[str] = None,
+        filter_type: Optional[EvalFilterType] = None,
+        eval_type: Optional[List[EvalType]] = None,
+        deserialize: Optional[bool] = True,
+    ) -> Union[List[EvalRunRecord], Tuple[List[Dict[str, Any]], int]]:
+        """Get all eval runs from the database.
+
+        Args:
+            limit (Optional[int]): The maximum number of eval runs to return.
+            page (Optional[int]): The page number.
+            sort_by (Optional[str]): The column to sort by.
+            sort_order (Optional[str]): The order to sort by.
+            agent_id (Optional[str]): The ID of the agent to filter by.
+            team_id (Optional[str]): The ID of the team to filter by.
+            workflow_id (Optional[str]): The ID of the workflow to filter by.
+            model_id (Optional[str]): The ID of the model to filter by.
+            eval_type (Optional[List[EvalType]]): The type(s) of eval to filter by.
+            filter_type (Optional[EvalFilterType]): Filter by component type (agent, team, workflow).
+            deserialize (Optional[bool]): Whether to deserialize the eval runs. Defaults to True.
+
+        Returns:
+            Union[List[EvalRunRecord], Tuple[List[Dict[str, Any]], int]]:
+                - When deserialize=True: List of EvalRunRecord objects
+                - When deserialize=False: Tuple of (eval run dictionaries, total count)
+
+        Raises:
+            Exception: If an error occurs during retrieval.
+        """
+        try:
+            table = self._get_table(table_type="evals")
+            if table is None:
+                return [] if deserialize else ([], 0)
+
+            with self.Session() as sess, sess.begin():
+                stmt = select(table)
+
+                # Filtering
+                if agent_id is not None:
+                    stmt = stmt.where(table.c.agent_id == agent_id)
+                if team_id is not None:
+                    stmt = stmt.where(table.c.team_id == team_id)
+                if workflow_id is not None:
+                    stmt = stmt.where(table.c.workflow_id == workflow_id)
+                if model_id is not None:
+                    stmt = stmt.where(table.c.model_id == model_id)
+                if eval_type is not None and len(eval_type) > 0:
+                    stmt = stmt.where(table.c.eval_type.in_(eval_type))
+                if filter_type is not None:
+                    if filter_type == EvalFilterType.AGENT:
+                        stmt = stmt.where(table.c.agent_id.is_not(None))
+                    elif filter_type == EvalFilterType.TEAM:
+                        stmt = stmt.where(table.c.team_id.is_not(None))
+                    elif filter_type == EvalFilterType.WORKFLOW:
+                        stmt = stmt.where(table.c.workflow_id.is_not(None))
+
+                # Get total count after applying filtering
+                count_stmt = select(func.count()).select_from(stmt.alias())
+                total_count = sess.execute(count_stmt).scalar()
+
+                # Sorting
+                if sort_by is None:
+                    stmt = stmt.order_by(table.c.created_at.desc())
+                else:
+                    stmt = apply_sorting(stmt, table, sort_by, sort_order)
+
+                # Paginating
+                if limit is not None:
+                    stmt = stmt.limit(limit)
+                    if page is not None:
+                        stmt = stmt.offset((page - 1) * limit)
+
+                result = sess.execute(stmt).fetchall()
+                if not result:
+                    return [] if deserialize else ([], 0)
+
+                eval_runs_raw = [row._mapping for row in result]
+                if not deserialize:
+                    return eval_runs_raw, total_count
+
+                return [EvalRunRecord.model_validate(row) for row in eval_runs_raw]
+
+        except Exception as e:
+            log_error(f"Exception getting eval runs: {e}")
+            raise e
+
2053
|
+
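For orientation, a minimal usage sketch of the filtering and pagination above; the `PostgresDb` import path and the connection string are assumptions, not taken from this diff:

```python
# A hedged usage sketch: filter + paginate eval runs.
from agno.db.postgres import PostgresDb  # assumed export

db = PostgresDb(db_url="postgresql+psycopg://ai:ai@localhost:5532/ai")  # hypothetical DSN

# Page 2 of an agent's eval runs, newest first (the default sort), as raw dicts plus a total:
rows, total = db.get_eval_runs(
    agent_id="my-agent",  # illustrative id
    limit=20,
    page=2,
    deserialize=False,    # returns (rows, total_count) instead of EvalRunRecord objects
)
print(f"showing {len(rows)} of {total} eval runs")
```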
+    def rename_eval_run(
+        self, eval_run_id: str, name: str, deserialize: Optional[bool] = True
+    ) -> Optional[Union[EvalRunRecord, Dict[str, Any]]]:
+        """Update the name of an eval run in the database.
+
+        Args:
+            eval_run_id (str): The ID of the eval run to update.
+            name (str): The new name of the eval run.
+            deserialize (Optional[bool]): Whether to deserialize the eval run. Defaults to True.
+
+        Returns:
+            Optional[Union[EvalRunRecord, Dict[str, Any]]]: The updated eval run, or None if the operation fails.
+
+        Raises:
+            Exception: If an error occurs during update.
+        """
+        try:
+            table = self._get_table(table_type="evals")
+            if table is None:
+                return None
+
+            with self.Session() as sess, sess.begin():
+                stmt = (
+                    table.update().where(table.c.run_id == eval_run_id).values(name=name, updated_at=int(time.time()))
+                )
+                sess.execute(stmt)
+
+            eval_run_raw = self.get_eval_run(eval_run_id=eval_run_id, deserialize=deserialize)
+            if not eval_run_raw or not deserialize:
+                return eval_run_raw
+
+            return EvalRunRecord.model_validate(eval_run_raw)
+
+        except Exception as e:
+            log_error(f"Error updating eval run name {eval_run_id}: {e}")
+            raise e
+
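A short usage sketch for the rename path (the id is illustrative, and `db` is the hypothetical instance from the earlier sketch):

```python
# Rename a run and read the refreshed record back.
updated = db.rename_eval_run(eval_run_id="er_123", name="baseline-run")  # illustrative id
if updated is not None:
    # With deserialize=True (the default) this is an EvalRunRecord; with False, a dict.
    print(updated)
```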
+    # -- Culture methods --
+
+    def clear_cultural_knowledge(self) -> None:
+        """Delete all cultural knowledge from the database.
+
+        Raises:
+            Exception: If an error occurs during deletion.
+        """
+        try:
+            table = self._get_table(table_type="culture")
+            if table is None:
+                return
+
+            with self.Session() as sess, sess.begin():
+                sess.execute(table.delete())
+
+        except Exception as e:
+            log_warning(f"Exception deleting all cultural knowledge: {e}")
+            raise e
+
+    def delete_cultural_knowledge(self, id: str) -> None:
+        """Delete a cultural knowledge entry from the database.
+
+        Args:
+            id (str): The ID of the cultural knowledge to delete.
+
+        Raises:
+            Exception: If an error occurs during deletion.
+        """
+        try:
+            table = self._get_table(table_type="culture")
+            if table is None:
+                return
+
+            with self.Session() as sess, sess.begin():
+                delete_stmt = table.delete().where(table.c.id == id)
+                result = sess.execute(delete_stmt)
+
+                success = result.rowcount > 0
+                if success:
+                    log_debug(f"Successfully deleted cultural knowledge id: {id}")
+                else:
+                    log_debug(f"No cultural knowledge found with id: {id}")
+
+        except Exception as e:
+            log_error(f"Error deleting cultural knowledge: {e}")
+            raise e
+
+    def get_cultural_knowledge(
+        self, id: str, deserialize: Optional[bool] = True
+    ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
+        """Get a cultural knowledge entry from the database.
+
+        Args:
+            id (str): The ID of the cultural knowledge to get.
+            deserialize (Optional[bool]): Whether to deserialize the cultural knowledge. Defaults to True.
+
+        Returns:
+            Optional[Union[CulturalKnowledge, Dict[str, Any]]]: The cultural knowledge entry, or None if it doesn't exist.
+
+        Raises:
+            Exception: If an error occurs during retrieval.
+        """
+        try:
+            table = self._get_table(table_type="culture")
+            if table is None:
+                return None
+
+            with self.Session() as sess, sess.begin():
+                stmt = select(table).where(table.c.id == id)
+                result = sess.execute(stmt).fetchone()
+                if result is None:
+                    return None
+
+                db_row = dict(result._mapping)
+                if not db_row or not deserialize:
+                    return db_row
+
+                return deserialize_cultural_knowledge(db_row)
+
+        except Exception as e:
+            log_error(f"Exception reading from cultural knowledge table: {e}")
+            raise e
+
+    def get_all_cultural_knowledge(
+        self,
+        name: Optional[str] = None,
+        agent_id: Optional[str] = None,
+        team_id: Optional[str] = None,
+        limit: Optional[int] = None,
+        page: Optional[int] = None,
+        sort_by: Optional[str] = None,
+        sort_order: Optional[str] = None,
+        deserialize: Optional[bool] = True,
+    ) -> Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
+        """Get all cultural knowledge from the database.
+
+        Args:
+            name (Optional[str]): The name of the cultural knowledge to filter by.
+            agent_id (Optional[str]): The ID of the agent to filter by.
+            team_id (Optional[str]): The ID of the team to filter by.
+            limit (Optional[int]): The maximum number of cultural knowledge entries to return.
+            page (Optional[int]): The page number.
+            sort_by (Optional[str]): The column to sort by.
+            sort_order (Optional[str]): The order to sort by.
+            deserialize (Optional[bool]): Whether to deserialize the cultural knowledge. Defaults to True.
+
+        Returns:
+            Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
+                - When deserialize=True: List of CulturalKnowledge objects
+                - When deserialize=False: Tuple of (list of dictionaries, total count)
+
+        Raises:
+            Exception: If an error occurs during retrieval.
+        """
+        try:
+            table = self._get_table(table_type="culture")
+            if table is None:
+                return [] if deserialize else ([], 0)
+
+            with self.Session() as sess, sess.begin():
+                stmt = select(table)
+
+                # Filtering
+                if name is not None:
+                    stmt = stmt.where(table.c.name == name)
+                if agent_id is not None:
+                    stmt = stmt.where(table.c.agent_id == agent_id)
+                if team_id is not None:
+                    stmt = stmt.where(table.c.team_id == team_id)
+
+                # Get total count after applying filtering
+                count_stmt = select(func.count()).select_from(stmt.alias())
+                total_count = sess.execute(count_stmt).scalar()
+
+                # Sorting
+                stmt = apply_sorting(stmt, table, sort_by, sort_order)
+                # Paginating
+                if limit is not None:
+                    stmt = stmt.limit(limit)
+                    if page is not None:
+                        stmt = stmt.offset((page - 1) * limit)
+
+                result = sess.execute(stmt).fetchall()
+                if not result:
+                    return [] if deserialize else ([], 0)
+
+                db_rows = [dict(record._mapping) for record in result]
+
+                if not deserialize:
+                    return db_rows, total_count
+
+                return [deserialize_cultural_knowledge(row) for row in db_rows]
+
+        except Exception as e:
+            log_error(f"Error reading from cultural knowledge table: {e}")
+            raise e
+
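The count-then-paginate pattern used here (and in `get_eval_runs` above) is worth isolating: the count runs over the filtered statement as a subquery, so the total always matches the filters regardless of the page requested. A self-contained sketch with an illustrative table, runnable against in-memory SQLite:

```python
# Standalone sketch of the count-then-paginate pattern; table/columns are illustrative.
from sqlalchemy import Column, Integer, MetaData, String, Table, create_engine, func, select

engine = create_engine("sqlite+pysqlite:///:memory:")
metadata = MetaData()
items = Table("items", metadata, Column("id", Integer, primary_key=True), Column("team_id", String))
metadata.create_all(engine)

stmt = select(items).where(items.c.team_id == "t1")          # filters first
count_stmt = select(func.count()).select_from(stmt.alias())  # COUNT over the filtered subquery
page, limit = 2, 10
stmt = stmt.limit(limit).offset((page - 1) * limit)          # then paginate the same statement

with engine.connect() as conn:
    total = conn.execute(count_stmt).scalar()
    rows = conn.execute(stmt).fetchall()
    print(total, rows)
```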
+    def upsert_cultural_knowledge(
+        self, cultural_knowledge: CulturalKnowledge, deserialize: Optional[bool] = True
+    ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
+        """Upsert a cultural knowledge entry into the database.
+
+        Args:
+            cultural_knowledge (CulturalKnowledge): The cultural knowledge to upsert.
+            deserialize (Optional[bool]): Whether to deserialize the cultural knowledge. Defaults to True.
+
+        Returns:
+            Optional[Union[CulturalKnowledge, Dict[str, Any]]]: The upserted cultural knowledge entry.
+
+        Raises:
+            Exception: If an error occurs during upsert.
+        """
+        try:
+            table = self._get_table(table_type="culture", create_table_if_not_found=True)
+            if table is None:
+                return None
+
+            if cultural_knowledge.id is None:
+                cultural_knowledge.id = str(uuid4())
+
+            # Serialize content, categories, and notes into a JSON dict for DB storage
+            content_dict = serialize_cultural_knowledge(cultural_knowledge)
+
+            with self.Session() as sess, sess.begin():
+                stmt = postgresql.insert(table).values(
+                    id=cultural_knowledge.id,
+                    name=cultural_knowledge.name,
+                    summary=cultural_knowledge.summary,
+                    content=content_dict if content_dict else None,
+                    metadata=cultural_knowledge.metadata,
+                    input=cultural_knowledge.input,
+                    created_at=cultural_knowledge.created_at,
+                    updated_at=int(time.time()),
+                    agent_id=cultural_knowledge.agent_id,
+                    team_id=cultural_knowledge.team_id,
+                )
+                stmt = stmt.on_conflict_do_update(  # type: ignore
+                    index_elements=["id"],
+                    set_=dict(
+                        name=cultural_knowledge.name,
+                        summary=cultural_knowledge.summary,
+                        content=content_dict if content_dict else None,
+                        metadata=cultural_knowledge.metadata,
+                        input=cultural_knowledge.input,
+                        updated_at=int(time.time()),
+                        agent_id=cultural_knowledge.agent_id,
+                        team_id=cultural_knowledge.team_id,
+                    ),
+                ).returning(table)
+
+                result = sess.execute(stmt)
+                row = result.fetchone()
+
+                if row is None:
+                    return None
+
+                db_row = dict(row._mapping)
+                if not db_row or not deserialize:
+                    return db_row
+
+                return deserialize_cultural_knowledge(db_row)
+
+        except Exception as e:
+            log_error(f"Error upserting cultural knowledge: {e}")
+            raise e
+
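The `INSERT ... ON CONFLICT DO UPDATE ... RETURNING` combination above does insert, update, and read-back in one round-trip. A stripped-down sketch of the same pattern with an illustrative table; compiling the statement needs no database connection:

```python
# Minimal sketch of the PostgreSQL upsert-and-return pattern; table/columns are illustrative.
import time
from sqlalchemy import BigInteger, Column, MetaData, String, Table
from sqlalchemy.dialects import postgresql

metadata = MetaData()
culture = Table(
    "culture",
    metadata,
    Column("id", String, primary_key=True),
    Column("name", String),
    Column("updated_at", BigInteger),
)

insert_stmt = postgresql.insert(culture).values(id="ck_1", name="greeting", updated_at=int(time.time()))
upsert_stmt = insert_stmt.on_conflict_do_update(
    index_elements=["id"],  # conflict target: the primary key
    set_=dict(name=insert_stmt.excluded.name, updated_at=insert_stmt.excluded.updated_at),
).returning(culture)        # RETURNING hands back the final row, saving a follow-up SELECT

# Render the SQL that would be sent to PostgreSQL:
print(upsert_stmt.compile(dialect=postgresql.dialect()))
```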
+    # -- Migrations --
+
+    def migrate_table_from_v1_to_v2(self, v1_db_schema: str, v1_table_name: str, v1_table_type: str):
+        """Migrate all content in the given v1 table to the corresponding v2 table."""
+
+        from agno.db.migrations.v1_to_v2 import (
+            get_all_table_content,
+            parse_agent_sessions,
+            parse_memories,
+            parse_team_sessions,
+            parse_workflow_sessions,
+        )
+
+        # Get all content from the old table
+        old_content: list[dict[str, Any]] = get_all_table_content(
+            db=self,
+            db_schema=v1_db_schema,
+            table_name=v1_table_name,
+        )
+        if not old_content:
+            log_info(f"No content to migrate from table {v1_table_name}")
+            return
+
+        # Parse the content into the new format
+        memories: List[UserMemory] = []
+        sessions: Sequence[Union[AgentSession, TeamSession, WorkflowSession]] = []
+        if v1_table_type == "agent_sessions":
+            sessions = parse_agent_sessions(old_content)
+        elif v1_table_type == "team_sessions":
+            sessions = parse_team_sessions(old_content)
+        elif v1_table_type == "workflow_sessions":
+            sessions = parse_workflow_sessions(old_content)
+        elif v1_table_type == "memories":
+            memories = parse_memories(old_content)
+        else:
+            raise ValueError(f"Invalid table type: {v1_table_type}")
+
+        # Insert the new content into the new table
+        if v1_table_type == "agent_sessions":
+            for session in sessions:
+                self.upsert_session(session)
+            log_info(f"Migrated {len(sessions)} Agent sessions to table: {self.session_table_name}")
+
+        elif v1_table_type == "team_sessions":
+            for session in sessions:
+                self.upsert_session(session)
+            log_info(f"Migrated {len(sessions)} Team sessions to table: {self.session_table_name}")
+
+        elif v1_table_type == "workflow_sessions":
+            for session in sessions:
+                self.upsert_session(session)
+            log_info(f"Migrated {len(sessions)} Workflow sessions to table: {self.session_table_name}")
+
+        elif v1_table_type == "memories":
+            for memory in memories:
+                self.upsert_user_memory(memory)
+            log_info(f"Migrated {len(memories)} memories to table: {self.memory_table}")
+
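A hedged invocation sketch; the schema and table names below are examples only, not values from this diff:

```python
# Migrate one v1 table into its v2 counterpart (names illustrative).
db.migrate_table_from_v1_to_v2(
    v1_db_schema="ai",
    v1_table_name="agent_sessions",
    v1_table_type="agent_sessions",  # agent_sessions | team_sessions | workflow_sessions | memories
)
```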
+    # --- Traces ---
+    def _get_traces_base_query(self, table: Table, spans_table: Optional[Table] = None):
+        """Build base query for traces with aggregated span counts.
+
+        Args:
+            table: The traces table.
+            spans_table: The spans table (optional).
+
+        Returns:
+            SQLAlchemy select statement with total_spans and error_count calculated dynamically.
+        """
+        from sqlalchemy import case, literal
+
+        if spans_table is not None:
+            # JOIN with spans table to calculate total_spans and error_count
+            return (
+                select(
+                    table,
+                    func.coalesce(func.count(spans_table.c.span_id), 0).label("total_spans"),
+                    func.coalesce(func.sum(case((spans_table.c.status_code == "ERROR", 1), else_=0)), 0).label(
+                        "error_count"
+                    ),
+                )
+                .select_from(table.outerjoin(spans_table, table.c.trace_id == spans_table.c.trace_id))
+                .group_by(table.c.trace_id)
+            )
+        else:
+            # Fallback if spans table doesn't exist
+            return select(table, literal(0).label("total_spans"), literal(0).label("error_count"))
+
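The conditional aggregate deserves a standalone look: the LEFT OUTER JOIN keeps traces that have no spans, and the `COALESCE` guards turn the resulting NULL aggregates into zeros. A self-contained sketch with illustrative tables (compile-only, no connection required):

```python
# Sketch of the outer-join + conditional-count aggregate; tables are illustrative.
from sqlalchemy import Column, MetaData, String, Table, case, func, select
from sqlalchemy.dialects import postgresql

metadata = MetaData()
traces = Table("traces", metadata, Column("trace_id", String, primary_key=True))
spans = Table(
    "spans",
    metadata,
    Column("span_id", String, primary_key=True),
    Column("trace_id", String),
    Column("status_code", String),
)

stmt = (
    select(
        traces,
        func.coalesce(func.count(spans.c.span_id), 0).label("total_spans"),
        # SUM(CASE WHEN status_code = 'ERROR' THEN 1 ELSE 0 END) counts only failing spans;
        # COALESCE keeps zero-span traces at 0 instead of NULL on the outer-join side.
        func.coalesce(func.sum(case((spans.c.status_code == "ERROR", 1), else_=0)), 0).label("error_count"),
    )
    .select_from(traces.outerjoin(spans, traces.c.trace_id == spans.c.trace_id))
    .group_by(traces.c.trace_id)
)
print(stmt.compile(dialect=postgresql.dialect()))
```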
+    def _get_trace_component_level_expr(self, workflow_id_col, team_id_col, agent_id_col, name_col):
+        """Build a SQL CASE expression that returns the component level for a trace.
+
+        Component levels (higher = more important):
+        - 3: Workflow root (.run or .arun with workflow_id)
+        - 2: Team root (.run or .arun with team_id)
+        - 1: Agent root (.run or .arun with agent_id)
+        - 0: Child span (not a root)
+
+        Args:
+            workflow_id_col: SQL column/expression for workflow_id
+            team_id_col: SQL column/expression for team_id
+            agent_id_col: SQL column/expression for agent_id
+            name_col: SQL column/expression for name
+
+        Returns:
+            SQLAlchemy CASE expression returning the component level as an integer.
+        """
+        is_root_name = or_(name_col.contains(".run"), name_col.contains(".arun"))
+
+        return case(
+            # Workflow root (level 3)
+            (and_(workflow_id_col.isnot(None), is_root_name), 3),
+            # Team root (level 2)
+            (and_(team_id_col.isnot(None), is_root_name), 2),
+            # Agent root (level 1)
+            (and_(agent_id_col.isnot(None), is_root_name), 1),
+            # Child span or unknown (level 0)
+            else_=0,
+        )
+
+    def upsert_trace(self, trace: "Trace") -> None:
+        """Create or update a single trace record in the database.
+
+        Uses INSERT ... ON CONFLICT DO UPDATE (upsert) to handle concurrent inserts
+        atomically and avoid race conditions.
+
+        Args:
+            trace: The Trace object to store (one per trace_id).
+        """
+        try:
+            table = self._get_table(table_type="traces", create_table_if_not_found=True)
+            if table is None:
+                return
+
+            trace_dict = trace.to_dict()
+            trace_dict.pop("total_spans", None)
+            trace_dict.pop("error_count", None)
+
+            with self.Session() as sess, sess.begin():
+                # Use upsert to handle concurrent inserts atomically
+                # On conflict, update fields while preserving existing non-null context values
+                # and keeping the earliest start_time
+                insert_stmt = postgresql.insert(table).values(trace_dict)
+
+                # Build component level expressions for comparing trace priority
+                new_level = self._get_trace_component_level_expr(
+                    insert_stmt.excluded.workflow_id,
+                    insert_stmt.excluded.team_id,
+                    insert_stmt.excluded.agent_id,
+                    insert_stmt.excluded.name,
+                )
+                existing_level = self._get_trace_component_level_expr(
+                    table.c.workflow_id,
+                    table.c.team_id,
+                    table.c.agent_id,
+                    table.c.name,
+                )
+
+                # Build the ON CONFLICT DO UPDATE clause
+                # Use LEAST for start_time, GREATEST for end_time to capture full trace duration
+                # Use COALESCE to preserve existing non-null context values
+                upsert_stmt = insert_stmt.on_conflict_do_update(
+                    index_elements=["trace_id"],
+                    set_={
+                        "end_time": func.greatest(table.c.end_time, insert_stmt.excluded.end_time),
+                        "start_time": func.least(table.c.start_time, insert_stmt.excluded.start_time),
+                        "duration_ms": func.extract(
+                            "epoch",
+                            func.cast(
+                                func.greatest(table.c.end_time, insert_stmt.excluded.end_time),
+                                TIMESTAMP(timezone=True),
+                            )
+                            - func.cast(
+                                func.least(table.c.start_time, insert_stmt.excluded.start_time),
+                                TIMESTAMP(timezone=True),
+                            ),
+                        )
+                        * 1000,
+                        "status": insert_stmt.excluded.status,
+                        # Update name only if new trace is from a higher-level component
+                        # Priority: workflow (3) > team (2) > agent (1) > child spans (0)
+                        "name": case(
+                            (new_level > existing_level, insert_stmt.excluded.name),
+                            else_=table.c.name,
+                        ),
+                        # Preserve existing non-null context values using COALESCE
+                        "run_id": func.coalesce(insert_stmt.excluded.run_id, table.c.run_id),
+                        "session_id": func.coalesce(insert_stmt.excluded.session_id, table.c.session_id),
+                        "user_id": func.coalesce(insert_stmt.excluded.user_id, table.c.user_id),
+                        "agent_id": func.coalesce(insert_stmt.excluded.agent_id, table.c.agent_id),
+                        "team_id": func.coalesce(insert_stmt.excluded.team_id, table.c.team_id),
+                        "workflow_id": func.coalesce(insert_stmt.excluded.workflow_id, table.c.workflow_id),
+                    },
+                )
+                sess.execute(upsert_stmt)
+
+        except Exception as e:
+            log_error(f"Error creating trace: {e}")
+            # Don't raise - tracing should not break the main application flow
+
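Since the merge logic is spread across SQL functions, a pure-Python restatement may help: `LEAST`/`GREATEST` widen the trace window, the newest status wins, and `COALESCE` ensures an incoming NULL never erases a known context value. (The name-priority CASE is omitted here; see the component levels above.) This sketch only mirrors the intent for one conflicting row; the database does the real work in SQL:

```python
# Pure-Python mirror of the on-conflict merge semantics (illustrative only).
def merge_trace(existing: dict, incoming: dict) -> dict:
    merged = dict(existing)
    merged["start_time"] = min(existing["start_time"], incoming["start_time"])  # LEAST: earliest start wins
    merged["end_time"] = max(existing["end_time"], incoming["end_time"])        # GREATEST: latest end wins
    merged["status"] = incoming["status"]                                       # newest status wins
    # COALESCE: keep the existing value whenever the incoming one is NULL
    for key in ("run_id", "session_id", "user_id", "agent_id", "team_id", "workflow_id"):
        merged[key] = incoming[key] if incoming[key] is not None else existing[key]
    return merged
```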
+    def get_trace(
+        self,
+        trace_id: Optional[str] = None,
+        run_id: Optional[str] = None,
+    ):
+        """Get a single trace by trace_id or other filters.
+
+        Args:
+            trace_id: The unique trace identifier.
+            run_id: Filter by run ID (returns first match).
+
+        Returns:
+            Optional[Trace]: The trace if found, None otherwise.
+
+        Note:
+            If multiple filters are provided, trace_id takes precedence.
+            For other filters, the most recent trace is returned.
+        """
+        try:
+            from agno.tracing.schemas import Trace
+
+            table = self._get_table(table_type="traces")
+            if table is None:
+                return None
+
+            # Get spans table for JOIN
+            spans_table = self._get_table(table_type="spans")
+
+            with self.Session() as sess:
+                # Build query with aggregated span counts
+                stmt = self._get_traces_base_query(table, spans_table)
+
+                if trace_id:
+                    stmt = stmt.where(table.c.trace_id == trace_id)
+                elif run_id:
+                    stmt = stmt.where(table.c.run_id == run_id)
+                else:
+                    log_debug("get_trace called without any filter parameters")
+                    return None
+
+                # Order by most recent and get first result
+                stmt = stmt.order_by(table.c.start_time.desc()).limit(1)
+                result = sess.execute(stmt).fetchone()
+
+                if result:
+                    return Trace.from_dict(dict(result._mapping))
+                return None
+
+        except Exception as e:
+            log_error(f"Error getting trace: {e}")
+            return None
+
+    def get_traces(
+        self,
+        run_id: Optional[str] = None,
+        session_id: Optional[str] = None,
+        user_id: Optional[str] = None,
+        agent_id: Optional[str] = None,
+        team_id: Optional[str] = None,
+        workflow_id: Optional[str] = None,
+        status: Optional[str] = None,
+        start_time: Optional[datetime] = None,
+        end_time: Optional[datetime] = None,
+        limit: Optional[int] = 20,
+        page: Optional[int] = 1,
+    ) -> tuple[List, int]:
+        """Get traces matching the provided filters with pagination.
+
+        Args:
+            run_id: Filter by run ID.
+            session_id: Filter by session ID.
+            user_id: Filter by user ID.
+            agent_id: Filter by agent ID.
+            team_id: Filter by team ID.
+            workflow_id: Filter by workflow ID.
+            status: Filter by status (OK, ERROR, UNSET).
+            start_time: Filter traces starting after this datetime.
+            end_time: Filter traces ending before this datetime.
+            limit: Maximum number of traces to return per page.
+            page: Page number (1-indexed).
+
+        Returns:
+            tuple[List[Trace], int]: Tuple of (list of matching traces, total count).
+        """
+        try:
+            from agno.tracing.schemas import Trace
+
+            table = self._get_table(table_type="traces")
+            if table is None:
+                log_debug("Traces table not found")
+                return [], 0
+
+            # Get spans table for JOIN
+            spans_table = self._get_table(table_type="spans")
+
+            with self.Session() as sess:
+                # Build base query with aggregated span counts
+                base_stmt = self._get_traces_base_query(table, spans_table)
+
+                # Apply filters
+                if run_id:
+                    base_stmt = base_stmt.where(table.c.run_id == run_id)
+                if session_id:
+                    base_stmt = base_stmt.where(table.c.session_id == session_id)
+                if user_id:
+                    base_stmt = base_stmt.where(table.c.user_id == user_id)
+                if agent_id:
+                    base_stmt = base_stmt.where(table.c.agent_id == agent_id)
+                if team_id:
+                    base_stmt = base_stmt.where(table.c.team_id == team_id)
+                if workflow_id:
+                    base_stmt = base_stmt.where(table.c.workflow_id == workflow_id)
+                if status:
+                    base_stmt = base_stmt.where(table.c.status == status)
+                if start_time:
+                    # Convert datetime to ISO string for comparison
+                    base_stmt = base_stmt.where(table.c.start_time >= start_time.isoformat())
+                if end_time:
+                    # Convert datetime to ISO string for comparison
+                    base_stmt = base_stmt.where(table.c.end_time <= end_time.isoformat())
+
+                # Get total count
+                count_stmt = select(func.count()).select_from(base_stmt.alias())
+                total_count = sess.execute(count_stmt).scalar() or 0
+
+                # Apply pagination
+                offset = (page - 1) * limit if page and limit else 0
+                paginated_stmt = base_stmt.order_by(table.c.start_time.desc()).limit(limit).offset(offset)
+
+                results = sess.execute(paginated_stmt).fetchall()
+
+                traces = [Trace.from_dict(dict(row._mapping)) for row in results]
+                return traces, total_count
+
+        except Exception as e:
+            log_error(f"Error getting traces: {e}")
+            return [], 0
+
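A hedged usage sketch (`db` is the hypothetical instance from the earlier sketch; filter values are illustrative):

```python
from datetime import datetime, timedelta, timezone

# First page of the last day's failed traces for one agent.
traces, total = db.get_traces(
    agent_id="my-agent",
    status="ERROR",
    start_time=datetime.now(timezone.utc) - timedelta(days=1),
    limit=20,
    page=1,
)
print(f"{total} matching traces; this page holds {len(traces)}")
```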
+    def get_trace_stats(
+        self,
+        user_id: Optional[str] = None,
+        agent_id: Optional[str] = None,
+        team_id: Optional[str] = None,
+        workflow_id: Optional[str] = None,
+        start_time: Optional[datetime] = None,
+        end_time: Optional[datetime] = None,
+        limit: Optional[int] = 20,
+        page: Optional[int] = 1,
+    ) -> tuple[List[Dict[str, Any]], int]:
+        """Get trace statistics grouped by session.
+
+        Args:
+            user_id: Filter by user ID.
+            agent_id: Filter by agent ID.
+            team_id: Filter by team ID.
+            workflow_id: Filter by workflow ID.
+            start_time: Filter sessions with traces created after this datetime.
+            end_time: Filter sessions with traces created before this datetime.
+            limit: Maximum number of sessions to return per page.
+            page: Page number (1-indexed).
+
+        Returns:
+            tuple[List[Dict], int]: Tuple of (list of session stats dicts, total count).
+            Each dict contains: session_id, user_id, agent_id, team_id, workflow_id,
+            total_traces, first_trace_at, last_trace_at.
+        """
+        try:
+            table = self._get_table(table_type="traces")
+            if table is None:
+                log_debug("Traces table not found")
+                return [], 0
+
+            with self.Session() as sess:
+                # Build base query grouped by session_id
+                base_stmt = (
+                    select(
+                        table.c.session_id,
+                        table.c.user_id,
+                        table.c.agent_id,
+                        table.c.team_id,
+                        table.c.workflow_id,
+                        func.count(table.c.trace_id).label("total_traces"),
+                        func.min(table.c.created_at).label("first_trace_at"),
+                        func.max(table.c.created_at).label("last_trace_at"),
+                    )
+                    .where(table.c.session_id.isnot(None))  # Only sessions with session_id
+                    .group_by(
+                        table.c.session_id, table.c.user_id, table.c.agent_id, table.c.team_id, table.c.workflow_id
+                    )
+                )
+
+                # Apply filters
+                if user_id:
+                    base_stmt = base_stmt.where(table.c.user_id == user_id)
+                if workflow_id:
+                    base_stmt = base_stmt.where(table.c.workflow_id == workflow_id)
+                if team_id:
+                    base_stmt = base_stmt.where(table.c.team_id == team_id)
+                if agent_id:
+                    base_stmt = base_stmt.where(table.c.agent_id == agent_id)
+                if start_time:
+                    # Convert datetime to ISO string for comparison
+                    base_stmt = base_stmt.where(table.c.created_at >= start_time.isoformat())
+                if end_time:
+                    # Convert datetime to ISO string for comparison
+                    base_stmt = base_stmt.where(table.c.created_at <= end_time.isoformat())
+
+                # Get total count of sessions
+                count_stmt = select(func.count()).select_from(base_stmt.alias())
+                total_count = sess.execute(count_stmt).scalar() or 0
+
+                # Apply pagination and ordering
+                offset = (page - 1) * limit if page and limit else 0
+                paginated_stmt = base_stmt.order_by(func.max(table.c.created_at).desc()).limit(limit).offset(offset)
+
+                results = sess.execute(paginated_stmt).fetchall()
+
+                # Convert to list of dicts with datetime objects
+                stats_list = []
+                for row in results:
+                    # Convert ISO strings to datetime objects
+                    first_trace_at_str = row.first_trace_at
+                    last_trace_at_str = row.last_trace_at
+
+                    # Parse ISO format strings to datetime objects
+                    first_trace_at = datetime.fromisoformat(first_trace_at_str.replace("Z", "+00:00"))
+                    last_trace_at = datetime.fromisoformat(last_trace_at_str.replace("Z", "+00:00"))
+
+                    stats_list.append(
+                        {
+                            "session_id": row.session_id,
+                            "user_id": row.user_id,
+                            "agent_id": row.agent_id,
+                            "team_id": row.team_id,
+                            "workflow_id": row.workflow_id,
+                            "total_traces": row.total_traces,
+                            "first_trace_at": first_trace_at,
+                            "last_trace_at": last_trace_at,
+                        }
+                    )
+
+                return stats_list, total_count
+
+        except Exception as e:
+            log_error(f"Error getting trace stats: {e}")
+            return [], 0
+
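A hedged usage sketch of the grouped stats (`db` and the user id are illustrative):

```python
# Per-session tracing activity for one user, most recently active sessions first.
stats, session_count = db.get_trace_stats(user_id="user-42", limit=10, page=1)
for s in stats:
    print(s["session_id"], s["total_traces"], s["first_trace_at"], s["last_trace_at"])
```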
+    # --- Spans ---
+    def create_span(self, span: "Span") -> None:
+        """Create a single span in the database.
+
+        Args:
+            span: The Span object to store.
+        """
+        try:
+            table = self._get_table(table_type="spans", create_table_if_not_found=True)
+            if table is None:
+                return
+
+            with self.Session() as sess, sess.begin():
+                stmt = postgresql.insert(table).values(span.to_dict())
+                sess.execute(stmt)
+
+        except Exception as e:
+            log_error(f"Error creating span: {e}")
+
+    def create_spans(self, spans: List) -> None:
+        """Create multiple spans in the database as a batch.
+
+        Args:
+            spans: List of Span objects to store.
+        """
+        if not spans:
+            return
+
+        try:
+            table = self._get_table(table_type="spans", create_table_if_not_found=True)
+            if table is None:
+                return
+
+            with self.Session() as sess, sess.begin():
+                for span in spans:
+                    stmt = postgresql.insert(table).values(span.to_dict())
+                    sess.execute(stmt)
+
+        except Exception as e:
+            log_error(f"Error creating spans batch: {e}")
+
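Note that the batch above still issues one INSERT per span, just inside a single transaction. SQLAlchemy also accepts a list of parameter dicts, which collapses the loop into one executemany call; a sketch under the same assumptions (`table` and `span.to_dict()` as in the method above), not the shipped implementation:

```python
# Possible batch variant: one executemany INSERT instead of a per-span loop.
with self.Session() as sess, sess.begin():
    sess.execute(postgresql.insert(table), [span.to_dict() for span in spans])
```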
+    def get_span(self, span_id: str):
+        """Get a single span by its span_id.
+
+        Args:
+            span_id: The unique span identifier.
+
+        Returns:
+            Optional[Span]: The span if found, None otherwise.
+        """
+        try:
+            from agno.tracing.schemas import Span
+
+            table = self._get_table(table_type="spans")
+            if table is None:
+                return None
+
+            with self.Session() as sess:
+                stmt = select(table).where(table.c.span_id == span_id)
+                result = sess.execute(stmt).fetchone()
+                if result:
+                    return Span.from_dict(dict(result._mapping))
+                return None
+
+        except Exception as e:
+            log_error(f"Error getting span: {e}")
+            return None
+
+    def get_spans(
+        self,
+        trace_id: Optional[str] = None,
+        parent_span_id: Optional[str] = None,
+        limit: Optional[int] = 1000,
+    ) -> List:
+        """Get spans matching the provided filters.
+
+        Args:
+            trace_id: Filter by trace ID.
+            parent_span_id: Filter by parent span ID.
+            limit: Maximum number of spans to return.
+
+        Returns:
+            List[Span]: List of matching spans.
+        """
+        try:
+            from agno.tracing.schemas import Span
+
+            table = self._get_table(table_type="spans")
+            if table is None:
+                return []
+
+            with self.Session() as sess:
+                stmt = select(table)
+
+                # Apply filters
+                if trace_id:
+                    stmt = stmt.where(table.c.trace_id == trace_id)
+                if parent_span_id:
+                    stmt = stmt.where(table.c.parent_span_id == parent_span_id)
+
+                if limit:
+                    stmt = stmt.limit(limit)
+
+                results = sess.execute(stmt).fetchall()
+                return [Span.from_dict(dict(row._mapping)) for row in results]
+
+        except Exception as e:
+            log_error(f"Error getting spans: {e}")
+            return []