agno 2.2.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/__init__.py +8 -0
- agno/agent/__init__.py +51 -0
- agno/agent/agent.py +10405 -0
- agno/api/__init__.py +0 -0
- agno/api/agent.py +28 -0
- agno/api/api.py +40 -0
- agno/api/evals.py +22 -0
- agno/api/os.py +17 -0
- agno/api/routes.py +13 -0
- agno/api/schemas/__init__.py +9 -0
- agno/api/schemas/agent.py +16 -0
- agno/api/schemas/evals.py +16 -0
- agno/api/schemas/os.py +14 -0
- agno/api/schemas/response.py +6 -0
- agno/api/schemas/team.py +16 -0
- agno/api/schemas/utils.py +21 -0
- agno/api/schemas/workflows.py +16 -0
- agno/api/settings.py +53 -0
- agno/api/team.py +30 -0
- agno/api/workflow.py +28 -0
- agno/cloud/aws/base.py +214 -0
- agno/cloud/aws/s3/__init__.py +2 -0
- agno/cloud/aws/s3/api_client.py +43 -0
- agno/cloud/aws/s3/bucket.py +195 -0
- agno/cloud/aws/s3/object.py +57 -0
- agno/culture/__init__.py +3 -0
- agno/culture/manager.py +956 -0
- agno/db/__init__.py +24 -0
- agno/db/async_postgres/__init__.py +3 -0
- agno/db/base.py +598 -0
- agno/db/dynamo/__init__.py +3 -0
- agno/db/dynamo/dynamo.py +2042 -0
- agno/db/dynamo/schemas.py +314 -0
- agno/db/dynamo/utils.py +743 -0
- agno/db/firestore/__init__.py +3 -0
- agno/db/firestore/firestore.py +1795 -0
- agno/db/firestore/schemas.py +140 -0
- agno/db/firestore/utils.py +376 -0
- agno/db/gcs_json/__init__.py +3 -0
- agno/db/gcs_json/gcs_json_db.py +1335 -0
- agno/db/gcs_json/utils.py +228 -0
- agno/db/in_memory/__init__.py +3 -0
- agno/db/in_memory/in_memory_db.py +1160 -0
- agno/db/in_memory/utils.py +230 -0
- agno/db/json/__init__.py +3 -0
- agno/db/json/json_db.py +1328 -0
- agno/db/json/utils.py +230 -0
- agno/db/migrations/__init__.py +0 -0
- agno/db/migrations/v1_to_v2.py +635 -0
- agno/db/mongo/__init__.py +17 -0
- agno/db/mongo/async_mongo.py +2026 -0
- agno/db/mongo/mongo.py +1982 -0
- agno/db/mongo/schemas.py +87 -0
- agno/db/mongo/utils.py +259 -0
- agno/db/mysql/__init__.py +3 -0
- agno/db/mysql/mysql.py +2308 -0
- agno/db/mysql/schemas.py +138 -0
- agno/db/mysql/utils.py +355 -0
- agno/db/postgres/__init__.py +4 -0
- agno/db/postgres/async_postgres.py +1927 -0
- agno/db/postgres/postgres.py +2260 -0
- agno/db/postgres/schemas.py +139 -0
- agno/db/postgres/utils.py +442 -0
- agno/db/redis/__init__.py +3 -0
- agno/db/redis/redis.py +1660 -0
- agno/db/redis/schemas.py +123 -0
- agno/db/redis/utils.py +346 -0
- agno/db/schemas/__init__.py +4 -0
- agno/db/schemas/culture.py +120 -0
- agno/db/schemas/evals.py +33 -0
- agno/db/schemas/knowledge.py +40 -0
- agno/db/schemas/memory.py +46 -0
- agno/db/schemas/metrics.py +0 -0
- agno/db/singlestore/__init__.py +3 -0
- agno/db/singlestore/schemas.py +130 -0
- agno/db/singlestore/singlestore.py +2272 -0
- agno/db/singlestore/utils.py +384 -0
- agno/db/sqlite/__init__.py +4 -0
- agno/db/sqlite/async_sqlite.py +2293 -0
- agno/db/sqlite/schemas.py +133 -0
- agno/db/sqlite/sqlite.py +2288 -0
- agno/db/sqlite/utils.py +431 -0
- agno/db/surrealdb/__init__.py +3 -0
- agno/db/surrealdb/metrics.py +292 -0
- agno/db/surrealdb/models.py +309 -0
- agno/db/surrealdb/queries.py +71 -0
- agno/db/surrealdb/surrealdb.py +1353 -0
- agno/db/surrealdb/utils.py +147 -0
- agno/db/utils.py +116 -0
- agno/debug.py +18 -0
- agno/eval/__init__.py +14 -0
- agno/eval/accuracy.py +834 -0
- agno/eval/performance.py +773 -0
- agno/eval/reliability.py +306 -0
- agno/eval/utils.py +119 -0
- agno/exceptions.py +161 -0
- agno/filters.py +354 -0
- agno/guardrails/__init__.py +6 -0
- agno/guardrails/base.py +19 -0
- agno/guardrails/openai.py +144 -0
- agno/guardrails/pii.py +94 -0
- agno/guardrails/prompt_injection.py +52 -0
- agno/integrations/__init__.py +0 -0
- agno/integrations/discord/__init__.py +3 -0
- agno/integrations/discord/client.py +203 -0
- agno/knowledge/__init__.py +5 -0
- agno/knowledge/chunking/__init__.py +0 -0
- agno/knowledge/chunking/agentic.py +79 -0
- agno/knowledge/chunking/document.py +91 -0
- agno/knowledge/chunking/fixed.py +57 -0
- agno/knowledge/chunking/markdown.py +151 -0
- agno/knowledge/chunking/recursive.py +63 -0
- agno/knowledge/chunking/row.py +39 -0
- agno/knowledge/chunking/semantic.py +86 -0
- agno/knowledge/chunking/strategy.py +165 -0
- agno/knowledge/content.py +74 -0
- agno/knowledge/document/__init__.py +5 -0
- agno/knowledge/document/base.py +58 -0
- agno/knowledge/embedder/__init__.py +5 -0
- agno/knowledge/embedder/aws_bedrock.py +343 -0
- agno/knowledge/embedder/azure_openai.py +210 -0
- agno/knowledge/embedder/base.py +23 -0
- agno/knowledge/embedder/cohere.py +323 -0
- agno/knowledge/embedder/fastembed.py +62 -0
- agno/knowledge/embedder/fireworks.py +13 -0
- agno/knowledge/embedder/google.py +258 -0
- agno/knowledge/embedder/huggingface.py +94 -0
- agno/knowledge/embedder/jina.py +182 -0
- agno/knowledge/embedder/langdb.py +22 -0
- agno/knowledge/embedder/mistral.py +206 -0
- agno/knowledge/embedder/nebius.py +13 -0
- agno/knowledge/embedder/ollama.py +154 -0
- agno/knowledge/embedder/openai.py +195 -0
- agno/knowledge/embedder/sentence_transformer.py +63 -0
- agno/knowledge/embedder/together.py +13 -0
- agno/knowledge/embedder/vllm.py +262 -0
- agno/knowledge/embedder/voyageai.py +165 -0
- agno/knowledge/knowledge.py +1988 -0
- agno/knowledge/reader/__init__.py +7 -0
- agno/knowledge/reader/arxiv_reader.py +81 -0
- agno/knowledge/reader/base.py +95 -0
- agno/knowledge/reader/csv_reader.py +166 -0
- agno/knowledge/reader/docx_reader.py +82 -0
- agno/knowledge/reader/field_labeled_csv_reader.py +292 -0
- agno/knowledge/reader/firecrawl_reader.py +201 -0
- agno/knowledge/reader/json_reader.py +87 -0
- agno/knowledge/reader/markdown_reader.py +137 -0
- agno/knowledge/reader/pdf_reader.py +431 -0
- agno/knowledge/reader/pptx_reader.py +101 -0
- agno/knowledge/reader/reader_factory.py +313 -0
- agno/knowledge/reader/s3_reader.py +89 -0
- agno/knowledge/reader/tavily_reader.py +194 -0
- agno/knowledge/reader/text_reader.py +115 -0
- agno/knowledge/reader/web_search_reader.py +372 -0
- agno/knowledge/reader/website_reader.py +455 -0
- agno/knowledge/reader/wikipedia_reader.py +59 -0
- agno/knowledge/reader/youtube_reader.py +78 -0
- agno/knowledge/remote_content/__init__.py +0 -0
- agno/knowledge/remote_content/remote_content.py +88 -0
- agno/knowledge/reranker/__init__.py +3 -0
- agno/knowledge/reranker/base.py +14 -0
- agno/knowledge/reranker/cohere.py +64 -0
- agno/knowledge/reranker/infinity.py +195 -0
- agno/knowledge/reranker/sentence_transformer.py +54 -0
- agno/knowledge/types.py +39 -0
- agno/knowledge/utils.py +189 -0
- agno/media.py +462 -0
- agno/memory/__init__.py +3 -0
- agno/memory/manager.py +1327 -0
- agno/models/__init__.py +0 -0
- agno/models/aimlapi/__init__.py +5 -0
- agno/models/aimlapi/aimlapi.py +45 -0
- agno/models/anthropic/__init__.py +5 -0
- agno/models/anthropic/claude.py +757 -0
- agno/models/aws/__init__.py +15 -0
- agno/models/aws/bedrock.py +701 -0
- agno/models/aws/claude.py +378 -0
- agno/models/azure/__init__.py +18 -0
- agno/models/azure/ai_foundry.py +485 -0
- agno/models/azure/openai_chat.py +131 -0
- agno/models/base.py +2175 -0
- agno/models/cerebras/__init__.py +12 -0
- agno/models/cerebras/cerebras.py +501 -0
- agno/models/cerebras/cerebras_openai.py +112 -0
- agno/models/cohere/__init__.py +5 -0
- agno/models/cohere/chat.py +389 -0
- agno/models/cometapi/__init__.py +5 -0
- agno/models/cometapi/cometapi.py +57 -0
- agno/models/dashscope/__init__.py +5 -0
- agno/models/dashscope/dashscope.py +91 -0
- agno/models/deepinfra/__init__.py +5 -0
- agno/models/deepinfra/deepinfra.py +28 -0
- agno/models/deepseek/__init__.py +5 -0
- agno/models/deepseek/deepseek.py +61 -0
- agno/models/defaults.py +1 -0
- agno/models/fireworks/__init__.py +5 -0
- agno/models/fireworks/fireworks.py +26 -0
- agno/models/google/__init__.py +5 -0
- agno/models/google/gemini.py +1085 -0
- agno/models/groq/__init__.py +5 -0
- agno/models/groq/groq.py +556 -0
- agno/models/huggingface/__init__.py +5 -0
- agno/models/huggingface/huggingface.py +491 -0
- agno/models/ibm/__init__.py +5 -0
- agno/models/ibm/watsonx.py +422 -0
- agno/models/internlm/__init__.py +3 -0
- agno/models/internlm/internlm.py +26 -0
- agno/models/langdb/__init__.py +1 -0
- agno/models/langdb/langdb.py +48 -0
- agno/models/litellm/__init__.py +14 -0
- agno/models/litellm/chat.py +468 -0
- agno/models/litellm/litellm_openai.py +25 -0
- agno/models/llama_cpp/__init__.py +5 -0
- agno/models/llama_cpp/llama_cpp.py +22 -0
- agno/models/lmstudio/__init__.py +5 -0
- agno/models/lmstudio/lmstudio.py +25 -0
- agno/models/message.py +434 -0
- agno/models/meta/__init__.py +12 -0
- agno/models/meta/llama.py +475 -0
- agno/models/meta/llama_openai.py +78 -0
- agno/models/metrics.py +120 -0
- agno/models/mistral/__init__.py +5 -0
- agno/models/mistral/mistral.py +432 -0
- agno/models/nebius/__init__.py +3 -0
- agno/models/nebius/nebius.py +54 -0
- agno/models/nexus/__init__.py +3 -0
- agno/models/nexus/nexus.py +22 -0
- agno/models/nvidia/__init__.py +5 -0
- agno/models/nvidia/nvidia.py +28 -0
- agno/models/ollama/__init__.py +5 -0
- agno/models/ollama/chat.py +441 -0
- agno/models/openai/__init__.py +9 -0
- agno/models/openai/chat.py +883 -0
- agno/models/openai/like.py +27 -0
- agno/models/openai/responses.py +1050 -0
- agno/models/openrouter/__init__.py +5 -0
- agno/models/openrouter/openrouter.py +66 -0
- agno/models/perplexity/__init__.py +5 -0
- agno/models/perplexity/perplexity.py +187 -0
- agno/models/portkey/__init__.py +3 -0
- agno/models/portkey/portkey.py +81 -0
- agno/models/requesty/__init__.py +5 -0
- agno/models/requesty/requesty.py +52 -0
- agno/models/response.py +199 -0
- agno/models/sambanova/__init__.py +5 -0
- agno/models/sambanova/sambanova.py +28 -0
- agno/models/siliconflow/__init__.py +5 -0
- agno/models/siliconflow/siliconflow.py +25 -0
- agno/models/together/__init__.py +5 -0
- agno/models/together/together.py +25 -0
- agno/models/utils.py +266 -0
- agno/models/vercel/__init__.py +3 -0
- agno/models/vercel/v0.py +26 -0
- agno/models/vertexai/__init__.py +0 -0
- agno/models/vertexai/claude.py +70 -0
- agno/models/vllm/__init__.py +3 -0
- agno/models/vllm/vllm.py +78 -0
- agno/models/xai/__init__.py +3 -0
- agno/models/xai/xai.py +113 -0
- agno/os/__init__.py +3 -0
- agno/os/app.py +876 -0
- agno/os/auth.py +57 -0
- agno/os/config.py +104 -0
- agno/os/interfaces/__init__.py +1 -0
- agno/os/interfaces/a2a/__init__.py +3 -0
- agno/os/interfaces/a2a/a2a.py +42 -0
- agno/os/interfaces/a2a/router.py +250 -0
- agno/os/interfaces/a2a/utils.py +924 -0
- agno/os/interfaces/agui/__init__.py +3 -0
- agno/os/interfaces/agui/agui.py +47 -0
- agno/os/interfaces/agui/router.py +144 -0
- agno/os/interfaces/agui/utils.py +534 -0
- agno/os/interfaces/base.py +25 -0
- agno/os/interfaces/slack/__init__.py +3 -0
- agno/os/interfaces/slack/router.py +148 -0
- agno/os/interfaces/slack/security.py +30 -0
- agno/os/interfaces/slack/slack.py +47 -0
- agno/os/interfaces/whatsapp/__init__.py +3 -0
- agno/os/interfaces/whatsapp/router.py +211 -0
- agno/os/interfaces/whatsapp/security.py +53 -0
- agno/os/interfaces/whatsapp/whatsapp.py +36 -0
- agno/os/mcp.py +292 -0
- agno/os/middleware/__init__.py +7 -0
- agno/os/middleware/jwt.py +233 -0
- agno/os/router.py +1763 -0
- agno/os/routers/__init__.py +3 -0
- agno/os/routers/evals/__init__.py +3 -0
- agno/os/routers/evals/evals.py +430 -0
- agno/os/routers/evals/schemas.py +142 -0
- agno/os/routers/evals/utils.py +162 -0
- agno/os/routers/health.py +31 -0
- agno/os/routers/home.py +52 -0
- agno/os/routers/knowledge/__init__.py +3 -0
- agno/os/routers/knowledge/knowledge.py +997 -0
- agno/os/routers/knowledge/schemas.py +178 -0
- agno/os/routers/memory/__init__.py +3 -0
- agno/os/routers/memory/memory.py +515 -0
- agno/os/routers/memory/schemas.py +62 -0
- agno/os/routers/metrics/__init__.py +3 -0
- agno/os/routers/metrics/metrics.py +190 -0
- agno/os/routers/metrics/schemas.py +47 -0
- agno/os/routers/session/__init__.py +3 -0
- agno/os/routers/session/session.py +997 -0
- agno/os/schema.py +1055 -0
- agno/os/settings.py +43 -0
- agno/os/utils.py +630 -0
- agno/py.typed +0 -0
- agno/reasoning/__init__.py +0 -0
- agno/reasoning/anthropic.py +80 -0
- agno/reasoning/azure_ai_foundry.py +67 -0
- agno/reasoning/deepseek.py +63 -0
- agno/reasoning/default.py +97 -0
- agno/reasoning/gemini.py +73 -0
- agno/reasoning/groq.py +71 -0
- agno/reasoning/helpers.py +63 -0
- agno/reasoning/ollama.py +67 -0
- agno/reasoning/openai.py +86 -0
- agno/reasoning/step.py +31 -0
- agno/reasoning/vertexai.py +76 -0
- agno/run/__init__.py +6 -0
- agno/run/agent.py +787 -0
- agno/run/base.py +229 -0
- agno/run/cancel.py +81 -0
- agno/run/messages.py +32 -0
- agno/run/team.py +753 -0
- agno/run/workflow.py +708 -0
- agno/session/__init__.py +10 -0
- agno/session/agent.py +295 -0
- agno/session/summary.py +265 -0
- agno/session/team.py +392 -0
- agno/session/workflow.py +205 -0
- agno/team/__init__.py +37 -0
- agno/team/team.py +8793 -0
- agno/tools/__init__.py +10 -0
- agno/tools/agentql.py +120 -0
- agno/tools/airflow.py +69 -0
- agno/tools/api.py +122 -0
- agno/tools/apify.py +314 -0
- agno/tools/arxiv.py +127 -0
- agno/tools/aws_lambda.py +53 -0
- agno/tools/aws_ses.py +66 -0
- agno/tools/baidusearch.py +89 -0
- agno/tools/bitbucket.py +292 -0
- agno/tools/brandfetch.py +213 -0
- agno/tools/bravesearch.py +106 -0
- agno/tools/brightdata.py +367 -0
- agno/tools/browserbase.py +209 -0
- agno/tools/calcom.py +255 -0
- agno/tools/calculator.py +151 -0
- agno/tools/cartesia.py +187 -0
- agno/tools/clickup.py +244 -0
- agno/tools/confluence.py +240 -0
- agno/tools/crawl4ai.py +158 -0
- agno/tools/csv_toolkit.py +185 -0
- agno/tools/dalle.py +110 -0
- agno/tools/daytona.py +475 -0
- agno/tools/decorator.py +262 -0
- agno/tools/desi_vocal.py +108 -0
- agno/tools/discord.py +161 -0
- agno/tools/docker.py +716 -0
- agno/tools/duckdb.py +379 -0
- agno/tools/duckduckgo.py +91 -0
- agno/tools/e2b.py +703 -0
- agno/tools/eleven_labs.py +196 -0
- agno/tools/email.py +67 -0
- agno/tools/evm.py +129 -0
- agno/tools/exa.py +396 -0
- agno/tools/fal.py +127 -0
- agno/tools/file.py +240 -0
- agno/tools/file_generation.py +350 -0
- agno/tools/financial_datasets.py +288 -0
- agno/tools/firecrawl.py +143 -0
- agno/tools/function.py +1187 -0
- agno/tools/giphy.py +93 -0
- agno/tools/github.py +1760 -0
- agno/tools/gmail.py +922 -0
- agno/tools/google_bigquery.py +117 -0
- agno/tools/google_drive.py +270 -0
- agno/tools/google_maps.py +253 -0
- agno/tools/googlecalendar.py +674 -0
- agno/tools/googlesearch.py +98 -0
- agno/tools/googlesheets.py +377 -0
- agno/tools/hackernews.py +77 -0
- agno/tools/jina.py +101 -0
- agno/tools/jira.py +170 -0
- agno/tools/knowledge.py +218 -0
- agno/tools/linear.py +426 -0
- agno/tools/linkup.py +58 -0
- agno/tools/local_file_system.py +90 -0
- agno/tools/lumalab.py +183 -0
- agno/tools/mcp/__init__.py +10 -0
- agno/tools/mcp/mcp.py +331 -0
- agno/tools/mcp/multi_mcp.py +347 -0
- agno/tools/mcp/params.py +24 -0
- agno/tools/mcp_toolbox.py +284 -0
- agno/tools/mem0.py +193 -0
- agno/tools/memori.py +339 -0
- agno/tools/memory.py +419 -0
- agno/tools/mlx_transcribe.py +139 -0
- agno/tools/models/__init__.py +0 -0
- agno/tools/models/azure_openai.py +190 -0
- agno/tools/models/gemini.py +203 -0
- agno/tools/models/groq.py +158 -0
- agno/tools/models/morph.py +186 -0
- agno/tools/models/nebius.py +124 -0
- agno/tools/models_labs.py +195 -0
- agno/tools/moviepy_video.py +349 -0
- agno/tools/neo4j.py +134 -0
- agno/tools/newspaper.py +46 -0
- agno/tools/newspaper4k.py +93 -0
- agno/tools/notion.py +204 -0
- agno/tools/openai.py +202 -0
- agno/tools/openbb.py +160 -0
- agno/tools/opencv.py +321 -0
- agno/tools/openweather.py +233 -0
- agno/tools/oxylabs.py +385 -0
- agno/tools/pandas.py +102 -0
- agno/tools/parallel.py +314 -0
- agno/tools/postgres.py +257 -0
- agno/tools/pubmed.py +188 -0
- agno/tools/python.py +205 -0
- agno/tools/reasoning.py +283 -0
- agno/tools/reddit.py +467 -0
- agno/tools/replicate.py +117 -0
- agno/tools/resend.py +62 -0
- agno/tools/scrapegraph.py +222 -0
- agno/tools/searxng.py +152 -0
- agno/tools/serpapi.py +116 -0
- agno/tools/serper.py +255 -0
- agno/tools/shell.py +53 -0
- agno/tools/slack.py +136 -0
- agno/tools/sleep.py +20 -0
- agno/tools/spider.py +116 -0
- agno/tools/sql.py +154 -0
- agno/tools/streamlit/__init__.py +0 -0
- agno/tools/streamlit/components.py +113 -0
- agno/tools/tavily.py +254 -0
- agno/tools/telegram.py +48 -0
- agno/tools/todoist.py +218 -0
- agno/tools/tool_registry.py +1 -0
- agno/tools/toolkit.py +146 -0
- agno/tools/trafilatura.py +388 -0
- agno/tools/trello.py +274 -0
- agno/tools/twilio.py +186 -0
- agno/tools/user_control_flow.py +78 -0
- agno/tools/valyu.py +228 -0
- agno/tools/visualization.py +467 -0
- agno/tools/webbrowser.py +28 -0
- agno/tools/webex.py +76 -0
- agno/tools/website.py +54 -0
- agno/tools/webtools.py +45 -0
- agno/tools/whatsapp.py +286 -0
- agno/tools/wikipedia.py +63 -0
- agno/tools/workflow.py +278 -0
- agno/tools/x.py +335 -0
- agno/tools/yfinance.py +257 -0
- agno/tools/youtube.py +184 -0
- agno/tools/zendesk.py +82 -0
- agno/tools/zep.py +454 -0
- agno/tools/zoom.py +382 -0
- agno/utils/__init__.py +0 -0
- agno/utils/agent.py +820 -0
- agno/utils/audio.py +49 -0
- agno/utils/certs.py +27 -0
- agno/utils/code_execution.py +11 -0
- agno/utils/common.py +132 -0
- agno/utils/dttm.py +13 -0
- agno/utils/enum.py +22 -0
- agno/utils/env.py +11 -0
- agno/utils/events.py +696 -0
- agno/utils/format_str.py +16 -0
- agno/utils/functions.py +166 -0
- agno/utils/gemini.py +426 -0
- agno/utils/hooks.py +57 -0
- agno/utils/http.py +74 -0
- agno/utils/json_schema.py +234 -0
- agno/utils/knowledge.py +36 -0
- agno/utils/location.py +19 -0
- agno/utils/log.py +255 -0
- agno/utils/mcp.py +214 -0
- agno/utils/media.py +352 -0
- agno/utils/merge_dict.py +41 -0
- agno/utils/message.py +118 -0
- agno/utils/models/__init__.py +0 -0
- agno/utils/models/ai_foundry.py +43 -0
- agno/utils/models/claude.py +358 -0
- agno/utils/models/cohere.py +87 -0
- agno/utils/models/llama.py +78 -0
- agno/utils/models/mistral.py +98 -0
- agno/utils/models/openai_responses.py +140 -0
- agno/utils/models/schema_utils.py +153 -0
- agno/utils/models/watsonx.py +41 -0
- agno/utils/openai.py +257 -0
- agno/utils/pickle.py +32 -0
- agno/utils/pprint.py +178 -0
- agno/utils/print_response/__init__.py +0 -0
- agno/utils/print_response/agent.py +842 -0
- agno/utils/print_response/team.py +1724 -0
- agno/utils/print_response/workflow.py +1668 -0
- agno/utils/prompts.py +111 -0
- agno/utils/reasoning.py +108 -0
- agno/utils/response.py +163 -0
- agno/utils/response_iterator.py +17 -0
- agno/utils/safe_formatter.py +24 -0
- agno/utils/serialize.py +32 -0
- agno/utils/shell.py +22 -0
- agno/utils/streamlit.py +487 -0
- agno/utils/string.py +231 -0
- agno/utils/team.py +139 -0
- agno/utils/timer.py +41 -0
- agno/utils/tools.py +102 -0
- agno/utils/web.py +23 -0
- agno/utils/whatsapp.py +305 -0
- agno/utils/yaml_io.py +25 -0
- agno/vectordb/__init__.py +3 -0
- agno/vectordb/base.py +127 -0
- agno/vectordb/cassandra/__init__.py +5 -0
- agno/vectordb/cassandra/cassandra.py +501 -0
- agno/vectordb/cassandra/extra_param_mixin.py +11 -0
- agno/vectordb/cassandra/index.py +13 -0
- agno/vectordb/chroma/__init__.py +5 -0
- agno/vectordb/chroma/chromadb.py +929 -0
- agno/vectordb/clickhouse/__init__.py +9 -0
- agno/vectordb/clickhouse/clickhousedb.py +835 -0
- agno/vectordb/clickhouse/index.py +9 -0
- agno/vectordb/couchbase/__init__.py +3 -0
- agno/vectordb/couchbase/couchbase.py +1442 -0
- agno/vectordb/distance.py +7 -0
- agno/vectordb/lancedb/__init__.py +6 -0
- agno/vectordb/lancedb/lance_db.py +995 -0
- agno/vectordb/langchaindb/__init__.py +5 -0
- agno/vectordb/langchaindb/langchaindb.py +163 -0
- agno/vectordb/lightrag/__init__.py +5 -0
- agno/vectordb/lightrag/lightrag.py +388 -0
- agno/vectordb/llamaindex/__init__.py +3 -0
- agno/vectordb/llamaindex/llamaindexdb.py +166 -0
- agno/vectordb/milvus/__init__.py +4 -0
- agno/vectordb/milvus/milvus.py +1182 -0
- agno/vectordb/mongodb/__init__.py +9 -0
- agno/vectordb/mongodb/mongodb.py +1417 -0
- agno/vectordb/pgvector/__init__.py +12 -0
- agno/vectordb/pgvector/index.py +23 -0
- agno/vectordb/pgvector/pgvector.py +1462 -0
- agno/vectordb/pineconedb/__init__.py +5 -0
- agno/vectordb/pineconedb/pineconedb.py +747 -0
- agno/vectordb/qdrant/__init__.py +5 -0
- agno/vectordb/qdrant/qdrant.py +1134 -0
- agno/vectordb/redis/__init__.py +9 -0
- agno/vectordb/redis/redisdb.py +694 -0
- agno/vectordb/search.py +7 -0
- agno/vectordb/singlestore/__init__.py +10 -0
- agno/vectordb/singlestore/index.py +41 -0
- agno/vectordb/singlestore/singlestore.py +763 -0
- agno/vectordb/surrealdb/__init__.py +3 -0
- agno/vectordb/surrealdb/surrealdb.py +699 -0
- agno/vectordb/upstashdb/__init__.py +5 -0
- agno/vectordb/upstashdb/upstashdb.py +718 -0
- agno/vectordb/weaviate/__init__.py +8 -0
- agno/vectordb/weaviate/index.py +15 -0
- agno/vectordb/weaviate/weaviate.py +1005 -0
- agno/workflow/__init__.py +23 -0
- agno/workflow/agent.py +299 -0
- agno/workflow/condition.py +738 -0
- agno/workflow/loop.py +735 -0
- agno/workflow/parallel.py +824 -0
- agno/workflow/router.py +702 -0
- agno/workflow/step.py +1432 -0
- agno/workflow/steps.py +592 -0
- agno/workflow/types.py +520 -0
- agno/workflow/workflow.py +4321 -0
- agno-2.2.13.dist-info/METADATA +614 -0
- agno-2.2.13.dist-info/RECORD +575 -0
- agno-2.2.13.dist-info/WHEEL +5 -0
- agno-2.2.13.dist-info/licenses/LICENSE +201 -0
- agno-2.2.13.dist-info/top_level.txt +1 -0
agno/db/dynamo/dynamo.py
ADDED
|
@@ -0,0 +1,2042 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import time
|
|
3
|
+
from datetime import date, datetime, timedelta, timezone
|
|
4
|
+
from os import getenv
|
|
5
|
+
from typing import Any, Dict, List, Optional, Tuple, Union
|
|
6
|
+
|
|
7
|
+
from agno.db.base import BaseDb, SessionType
|
|
8
|
+
from agno.db.dynamo.schemas import get_table_schema_definition
|
|
9
|
+
from agno.db.dynamo.utils import (
|
|
10
|
+
apply_pagination,
|
|
11
|
+
apply_sorting,
|
|
12
|
+
build_query_filter_expression,
|
|
13
|
+
build_topic_filter_expression,
|
|
14
|
+
calculate_date_metrics,
|
|
15
|
+
create_table_if_not_exists,
|
|
16
|
+
deserialize_cultural_knowledge_from_db,
|
|
17
|
+
deserialize_eval_record,
|
|
18
|
+
deserialize_from_dynamodb_item,
|
|
19
|
+
deserialize_knowledge_row,
|
|
20
|
+
deserialize_session,
|
|
21
|
+
deserialize_session_result,
|
|
22
|
+
execute_query_with_pagination,
|
|
23
|
+
fetch_all_sessions_data,
|
|
24
|
+
get_dates_to_calculate_metrics_for,
|
|
25
|
+
merge_with_existing_session,
|
|
26
|
+
prepare_session_data,
|
|
27
|
+
serialize_cultural_knowledge_for_db,
|
|
28
|
+
serialize_eval_record,
|
|
29
|
+
serialize_knowledge_row,
|
|
30
|
+
serialize_to_dynamo_item,
|
|
31
|
+
)
|
|
32
|
+
from agno.db.schemas.culture import CulturalKnowledge
|
|
33
|
+
from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
|
|
34
|
+
from agno.db.schemas.knowledge import KnowledgeRow
|
|
35
|
+
from agno.db.schemas.memory import UserMemory
|
|
36
|
+
from agno.session import AgentSession, Session, TeamSession, WorkflowSession
|
|
37
|
+
from agno.utils.log import log_debug, log_error, log_info
|
|
38
|
+
from agno.utils.string import generate_id
|
|
39
|
+
|
|
40
|
+
try:
|
|
41
|
+
import boto3 # type: ignore[import-untyped]
|
|
42
|
+
except ImportError:
|
|
43
|
+
raise ImportError("`boto3` not installed. Please install it using `pip install boto3`")
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
# DynamoDB batch_write_item has a hard limit of 25 items per request
|
|
47
|
+
DYNAMO_BATCH_SIZE_LIMIT = 25
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
class DynamoDb(BaseDb):
|
|
51
|
+
def __init__(
|
|
52
|
+
self,
|
|
53
|
+
db_client=None,
|
|
54
|
+
region_name: Optional[str] = None,
|
|
55
|
+
aws_access_key_id: Optional[str] = None,
|
|
56
|
+
aws_secret_access_key: Optional[str] = None,
|
|
57
|
+
session_table: Optional[str] = None,
|
|
58
|
+
culture_table: Optional[str] = None,
|
|
59
|
+
memory_table: Optional[str] = None,
|
|
60
|
+
metrics_table: Optional[str] = None,
|
|
61
|
+
eval_table: Optional[str] = None,
|
|
62
|
+
knowledge_table: Optional[str] = None,
|
|
63
|
+
id: Optional[str] = None,
|
|
64
|
+
):
|
|
65
|
+
"""
|
|
66
|
+
Interface for interacting with a DynamoDB database.
|
|
67
|
+
|
|
68
|
+
Args:
|
|
69
|
+
db_client: The DynamoDB client to use.
|
|
70
|
+
region_name: AWS region name.
|
|
71
|
+
aws_access_key_id: AWS access key ID.
|
|
72
|
+
aws_secret_access_key: AWS secret access key.
|
|
73
|
+
session_table: The name of the session table.
|
|
74
|
+
culture_table: The name of the culture table.
|
|
75
|
+
memory_table: The name of the memory table.
|
|
76
|
+
metrics_table: The name of the metrics table.
|
|
77
|
+
eval_table: The name of the eval table.
|
|
78
|
+
knowledge_table: The name of the knowledge table.
|
|
79
|
+
id: ID of the database.
|
|
80
|
+
"""
|
|
81
|
+
if id is None:
|
|
82
|
+
seed = str(db_client) if db_client else f"{region_name}_{aws_access_key_id}"
|
|
83
|
+
id = generate_id(seed)
|
|
84
|
+
|
|
85
|
+
super().__init__(
|
|
86
|
+
id=id,
|
|
87
|
+
session_table=session_table,
|
|
88
|
+
culture_table=culture_table,
|
|
89
|
+
memory_table=memory_table,
|
|
90
|
+
metrics_table=metrics_table,
|
|
91
|
+
eval_table=eval_table,
|
|
92
|
+
knowledge_table=knowledge_table,
|
|
93
|
+
)
|
|
94
|
+
|
|
95
|
+
if db_client is not None:
|
|
96
|
+
self.client = db_client
|
|
97
|
+
else:
|
|
98
|
+
if not region_name and not getenv("AWS_REGION"):
|
|
99
|
+
raise ValueError("AWS_REGION is not set. Please set the AWS_REGION environment variable.")
|
|
100
|
+
if not aws_access_key_id and not getenv("AWS_ACCESS_KEY_ID"):
|
|
101
|
+
raise ValueError("AWS_ACCESS_KEY_ID is not set. Please set the AWS_ACCESS_KEY_ID environment variable.")
|
|
102
|
+
if not aws_secret_access_key and not getenv("AWS_SECRET_ACCESS_KEY"):
|
|
103
|
+
raise ValueError(
|
|
104
|
+
"AWS_SECRET_ACCESS_KEY is not set. Please set the AWS_SECRET_ACCESS_KEY environment variable."
|
|
105
|
+
)
|
|
106
|
+
|
|
107
|
+
session_kwargs = {}
|
|
108
|
+
session_kwargs["region_name"] = region_name or getenv("AWS_REGION")
|
|
109
|
+
session_kwargs["aws_access_key_id"] = aws_access_key_id or getenv("AWS_ACCESS_KEY_ID")
|
|
110
|
+
session_kwargs["aws_secret_access_key"] = aws_secret_access_key or getenv("AWS_SECRET_ACCESS_KEY")
|
|
111
|
+
|
|
112
|
+
session = boto3.Session(**session_kwargs)
|
|
113
|
+
self.client = session.client("dynamodb")
|
|
114
|
+
|
|
115
|
+
def table_exists(self, table_name: str) -> bool:
|
|
116
|
+
"""Check if a DynamoDB table exists.
|
|
117
|
+
|
|
118
|
+
Args:
|
|
119
|
+
table_name: The name of the table to check
|
|
120
|
+
|
|
121
|
+
Returns:
|
|
122
|
+
bool: True if the table exists, False otherwise
|
|
123
|
+
"""
|
|
124
|
+
try:
|
|
125
|
+
self.client.describe_table(TableName=table_name)
|
|
126
|
+
return True
|
|
127
|
+
except self.client.exceptions.ResourceNotFoundException:
|
|
128
|
+
return False
|
|
129
|
+
|
|
130
|
+
def _create_all_tables(self):
|
|
131
|
+
"""Create all configured DynamoDB tables if they don't exist."""
|
|
132
|
+
tables_to_create = [
|
|
133
|
+
("sessions", self.session_table_name),
|
|
134
|
+
("memories", self.memory_table_name),
|
|
135
|
+
("metrics", self.metrics_table_name),
|
|
136
|
+
("evals", self.eval_table_name),
|
|
137
|
+
("knowledge", self.knowledge_table_name),
|
|
138
|
+
("culture", self.culture_table_name),
|
|
139
|
+
]
|
|
140
|
+
|
|
141
|
+
for table_type, table_name in tables_to_create:
|
|
142
|
+
if not self.table_exists(table_name):
|
|
143
|
+
schema = get_table_schema_definition(table_type)
|
|
144
|
+
schema["TableName"] = table_name
|
|
145
|
+
create_table_if_not_exists(self.client, table_name, schema)
|
|
146
|
+
|
|
147
|
+
def _get_table(self, table_type: str, create_table_if_not_found: Optional[bool] = True) -> Optional[str]:
|
|
148
|
+
"""
|
|
149
|
+
Get table name and ensure the table exists, creating it if needed.
|
|
150
|
+
|
|
151
|
+
Args:
|
|
152
|
+
table_type: Type of table ("sessions", "memories", "metrics", "evals", "knowledge_sources")
|
|
153
|
+
|
|
154
|
+
Returns:
|
|
155
|
+
str: The table name
|
|
156
|
+
|
|
157
|
+
Raises:
|
|
158
|
+
ValueError: If table name is not configured or table type is unknown
|
|
159
|
+
"""
|
|
160
|
+
table_name = None
|
|
161
|
+
|
|
162
|
+
if table_type == "sessions":
|
|
163
|
+
table_name = self.session_table_name
|
|
164
|
+
elif table_type == "memories":
|
|
165
|
+
table_name = self.memory_table_name
|
|
166
|
+
elif table_type == "metrics":
|
|
167
|
+
table_name = self.metrics_table_name
|
|
168
|
+
elif table_type == "evals":
|
|
169
|
+
table_name = self.eval_table_name
|
|
170
|
+
elif table_type == "knowledge":
|
|
171
|
+
table_name = self.knowledge_table_name
|
|
172
|
+
elif table_type == "culture":
|
|
173
|
+
table_name = self.culture_table_name
|
|
174
|
+
else:
|
|
175
|
+
raise ValueError(f"Unknown table type: {table_type}")
|
|
176
|
+
|
|
177
|
+
# Check if table exists, create if it doesn't
|
|
178
|
+
if not self.table_exists(table_name) and create_table_if_not_found:
|
|
179
|
+
schema = get_table_schema_definition(table_type)
|
|
180
|
+
schema["TableName"] = table_name
|
|
181
|
+
create_table_if_not_exists(self.client, table_name, schema)
|
|
182
|
+
|
|
183
|
+
return table_name
|
|
184
|
+
|
|
185
|
+
# --- Sessions ---
|
|
186
|
+
|
|
187
|
+
def delete_session(self, session_id: Optional[str] = None) -> bool:
|
|
188
|
+
"""
|
|
189
|
+
Delete a session from the database.
|
|
190
|
+
|
|
191
|
+
Args:
|
|
192
|
+
session_id: The ID of the session to delete.
|
|
193
|
+
|
|
194
|
+
Raises:
|
|
195
|
+
Exception: If any error occurs while deleting the session.
|
|
196
|
+
"""
|
|
197
|
+
if not session_id:
|
|
198
|
+
return False
|
|
199
|
+
|
|
200
|
+
try:
|
|
201
|
+
self.client.delete_item(
|
|
202
|
+
TableName=self.session_table_name,
|
|
203
|
+
Key={"session_id": {"S": session_id}},
|
|
204
|
+
)
|
|
205
|
+
return True
|
|
206
|
+
|
|
207
|
+
except Exception as e:
|
|
208
|
+
log_error(f"Failed to delete session {session_id}: {e}")
|
|
209
|
+
raise e
|
|
210
|
+
|
|
211
|
+
def delete_sessions(self, session_ids: List[str]) -> None:
|
|
212
|
+
"""
|
|
213
|
+
Delete sessions from the database in batches.
|
|
214
|
+
|
|
215
|
+
Args:
|
|
216
|
+
session_ids: List of session IDs to delete
|
|
217
|
+
|
|
218
|
+
Raises:
|
|
219
|
+
Exception: If any error occurs while deleting the sessions.
|
|
220
|
+
"""
|
|
221
|
+
if not session_ids or not self.session_table_name:
|
|
222
|
+
return
|
|
223
|
+
|
|
224
|
+
try:
|
|
225
|
+
# Process the items to delete in batches of the max allowed size or less
|
|
226
|
+
for i in range(0, len(session_ids), DYNAMO_BATCH_SIZE_LIMIT):
|
|
227
|
+
batch = session_ids[i : i + DYNAMO_BATCH_SIZE_LIMIT]
|
|
228
|
+
delete_requests = []
|
|
229
|
+
|
|
230
|
+
for session_id in batch:
|
|
231
|
+
delete_requests.append({"DeleteRequest": {"Key": {"session_id": {"S": session_id}}}})
|
|
232
|
+
|
|
233
|
+
if delete_requests:
|
|
234
|
+
self.client.batch_write_item(RequestItems={self.session_table_name: delete_requests})
|
|
235
|
+
|
|
236
|
+
except Exception as e:
|
|
237
|
+
log_error(f"Failed to delete sessions: {e}")
|
|
238
|
+
raise e
|
|
239
|
+
|
|
240
|
+
def get_session(
|
|
241
|
+
self,
|
|
242
|
+
session_id: str,
|
|
243
|
+
session_type: SessionType,
|
|
244
|
+
user_id: Optional[str] = None,
|
|
245
|
+
deserialize: Optional[bool] = True,
|
|
246
|
+
) -> Optional[Union[Session, Dict[str, Any]]]:
|
|
247
|
+
"""
|
|
248
|
+
Get a session from the database as a Session object.
|
|
249
|
+
|
|
250
|
+
Args:
|
|
251
|
+
session_id (str): The ID of the session to get.
|
|
252
|
+
session_type (SessionType): The type of session to get.
|
|
253
|
+
user_id (Optional[str]): The ID of the user to get the session for.
|
|
254
|
+
deserialize (Optional[bool]): Whether to deserialize the session.
|
|
255
|
+
|
|
256
|
+
Returns:
|
|
257
|
+
Optional[Session]: The session data as a Session object.
|
|
258
|
+
|
|
259
|
+
Raises:
|
|
260
|
+
Exception: If any error occurs while getting the session.
|
|
261
|
+
"""
|
|
262
|
+
try:
|
|
263
|
+
table_name = self._get_table("sessions")
|
|
264
|
+
response = self.client.get_item(
|
|
265
|
+
TableName=table_name,
|
|
266
|
+
Key={"session_id": {"S": session_id}},
|
|
267
|
+
)
|
|
268
|
+
|
|
269
|
+
item = response.get("Item")
|
|
270
|
+
if not item:
|
|
271
|
+
return None
|
|
272
|
+
|
|
273
|
+
session = deserialize_from_dynamodb_item(item)
|
|
274
|
+
|
|
275
|
+
if user_id and session.get("user_id") != user_id:
|
|
276
|
+
return None
|
|
277
|
+
|
|
278
|
+
if not session:
|
|
279
|
+
return None
|
|
280
|
+
|
|
281
|
+
if not deserialize:
|
|
282
|
+
return session
|
|
283
|
+
|
|
284
|
+
if session_type == SessionType.AGENT:
|
|
285
|
+
return AgentSession.from_dict(session)
|
|
286
|
+
elif session_type == SessionType.TEAM:
|
|
287
|
+
return TeamSession.from_dict(session)
|
|
288
|
+
elif session_type == SessionType.WORKFLOW:
|
|
289
|
+
return WorkflowSession.from_dict(session)
|
|
290
|
+
else:
|
|
291
|
+
raise ValueError(f"Invalid session type: {session_type}")
|
|
292
|
+
|
|
293
|
+
except Exception as e:
|
|
294
|
+
log_error(f"Failed to get session {session_id}: {e}")
|
|
295
|
+
raise e
|
|
296
|
+
|
|
297
|
+
def get_sessions(
|
|
298
|
+
self,
|
|
299
|
+
session_type: SessionType,
|
|
300
|
+
user_id: Optional[str] = None,
|
|
301
|
+
component_id: Optional[str] = None,
|
|
302
|
+
session_name: Optional[str] = None,
|
|
303
|
+
start_timestamp: Optional[int] = None,
|
|
304
|
+
end_timestamp: Optional[int] = None,
|
|
305
|
+
limit: Optional[int] = None,
|
|
306
|
+
page: Optional[int] = None,
|
|
307
|
+
sort_by: Optional[str] = None,
|
|
308
|
+
sort_order: Optional[str] = None,
|
|
309
|
+
deserialize: Optional[bool] = True,
|
|
310
|
+
) -> Union[List[Session], Tuple[List[Dict[str, Any]], int]]:
|
|
311
|
+
try:
|
|
312
|
+
table_name = self._get_table("sessions")
|
|
313
|
+
if table_name is None:
|
|
314
|
+
return [] if deserialize else ([], 0)
|
|
315
|
+
|
|
316
|
+
# Build filter expression for additional filters
|
|
317
|
+
filter_expression = None
|
|
318
|
+
expression_attribute_names = {}
|
|
319
|
+
expression_attribute_values = {":session_type": {"S": session_type.value}}
|
|
320
|
+
|
|
321
|
+
if user_id:
|
|
322
|
+
filter_expression = "#user_id = :user_id"
|
|
323
|
+
expression_attribute_names["#user_id"] = "user_id"
|
|
324
|
+
expression_attribute_values[":user_id"] = {"S": user_id}
|
|
325
|
+
|
|
326
|
+
if component_id:
|
|
327
|
+
# Map component_id to the appropriate field based on session type
|
|
328
|
+
if session_type == SessionType.AGENT:
|
|
329
|
+
component_filter = "#agent_id = :component_id"
|
|
330
|
+
expression_attribute_names["#agent_id"] = "agent_id"
|
|
331
|
+
elif session_type == SessionType.TEAM:
|
|
332
|
+
component_filter = "#team_id = :component_id"
|
|
333
|
+
expression_attribute_names["#team_id"] = "team_id"
|
|
334
|
+
else:
|
|
335
|
+
component_filter = "#workflow_id = :component_id"
|
|
336
|
+
expression_attribute_names["#workflow_id"] = "workflow_id"
|
|
337
|
+
|
|
338
|
+
if component_filter:
|
|
339
|
+
expression_attribute_values[":component_id"] = {"S": component_id}
|
|
340
|
+
if filter_expression:
|
|
341
|
+
filter_expression += f" AND {component_filter}"
|
|
342
|
+
else:
|
|
343
|
+
filter_expression = component_filter
|
|
344
|
+
|
|
345
|
+
if session_name:
|
|
346
|
+
name_filter = "#session_name = :session_name"
|
|
347
|
+
expression_attribute_names["#session_name"] = "session_name"
|
|
348
|
+
expression_attribute_values[":session_name"] = {"S": session_name}
|
|
349
|
+
if filter_expression:
|
|
350
|
+
filter_expression += f" AND {name_filter}"
|
|
351
|
+
else:
|
|
352
|
+
filter_expression = name_filter
|
|
353
|
+
|
|
354
|
+
# Use GSI query for session_type
|
|
355
|
+
query_kwargs = {
|
|
356
|
+
"TableName": table_name,
|
|
357
|
+
"IndexName": "session_type-created_at-index",
|
|
358
|
+
"KeyConditionExpression": "session_type = :session_type",
|
|
359
|
+
"ExpressionAttributeValues": expression_attribute_values,
|
|
360
|
+
}
|
|
361
|
+
if filter_expression:
|
|
362
|
+
query_kwargs["FilterExpression"] = filter_expression
|
|
363
|
+
if expression_attribute_names:
|
|
364
|
+
query_kwargs["ExpressionAttributeNames"] = expression_attribute_names
|
|
365
|
+
|
|
366
|
+
# Apply sorting
|
|
367
|
+
if sort_by == "created_at":
|
|
368
|
+
query_kwargs["ScanIndexForward"] = sort_order != "desc" # type: ignore
|
|
369
|
+
|
|
370
|
+
# Apply limit at DynamoDB level
|
|
371
|
+
if limit and not page:
|
|
372
|
+
query_kwargs["Limit"] = limit # type: ignore
|
|
373
|
+
|
|
374
|
+
items = []
|
|
375
|
+
response = self.client.query(**query_kwargs)
|
|
376
|
+
items.extend(response.get("Items", []))
|
|
377
|
+
|
|
378
|
+
# Handle pagination
|
|
379
|
+
while "LastEvaluatedKey" in response:
|
|
380
|
+
query_kwargs["ExclusiveStartKey"] = response["LastEvaluatedKey"]
|
|
381
|
+
response = self.client.query(**query_kwargs)
|
|
382
|
+
items.extend(response.get("Items", []))
|
|
383
|
+
|
|
384
|
+
# Convert DynamoDB items to session data
|
|
385
|
+
sessions_data = []
|
|
386
|
+
for item in items:
|
|
387
|
+
session_data = deserialize_from_dynamodb_item(item)
|
|
388
|
+
if session_data:
|
|
389
|
+
sessions_data.append(session_data)
|
|
390
|
+
|
|
391
|
+
# Apply in-memory sorting for fields not supported by DynamoDB
|
|
392
|
+
if sort_by and sort_by != "created_at":
|
|
393
|
+
sessions_data = apply_sorting(sessions_data, sort_by, sort_order)
|
|
394
|
+
|
|
395
|
+
# Get total count before pagination
|
|
396
|
+
total_count = len(sessions_data)
|
|
397
|
+
|
|
398
|
+
# Apply pagination
|
|
399
|
+
if page:
|
|
400
|
+
sessions_data = apply_pagination(sessions_data, limit, page)
|
|
401
|
+
|
|
402
|
+
if not deserialize:
|
|
403
|
+
return sessions_data, total_count
|
|
404
|
+
|
|
405
|
+
sessions = []
|
|
406
|
+
for session_data in sessions_data:
|
|
407
|
+
session = deserialize_session(session_data)
|
|
408
|
+
if session:
|
|
409
|
+
sessions.append(session)
|
|
410
|
+
|
|
411
|
+
return sessions
|
|
412
|
+
|
|
413
|
+
except Exception as e:
|
|
414
|
+
log_error(f"Failed to get sessions: {e}")
|
|
415
|
+
raise e
|
|
416
|
+
|
|
417
|
+
def rename_session(
|
|
418
|
+
self,
|
|
419
|
+
session_id: str,
|
|
420
|
+
session_type: SessionType,
|
|
421
|
+
session_name: str,
|
|
422
|
+
deserialize: Optional[bool] = True,
|
|
423
|
+
) -> Optional[Union[Session, Dict[str, Any]]]:
|
|
424
|
+
"""
|
|
425
|
+
Rename a session in the database.
|
|
426
|
+
|
|
427
|
+
Args:
|
|
428
|
+
session_id: The ID of the session to rename.
|
|
429
|
+
session_type: The type of session to rename.
|
|
430
|
+
session_name: The new name for the session.
|
|
431
|
+
|
|
432
|
+
Returns:
|
|
433
|
+
Optional[Session]: The renamed session if successful, None otherwise.
|
|
434
|
+
|
|
435
|
+
Raises:
|
|
436
|
+
Exception: If any error occurs while renaming the session.
|
|
437
|
+
"""
|
|
438
|
+
try:
|
|
439
|
+
if not self.session_table_name:
|
|
440
|
+
raise Exception("Sessions table not found")
|
|
441
|
+
|
|
442
|
+
# Get current session_data
|
|
443
|
+
get_response = self.client.get_item(
|
|
444
|
+
TableName=self.session_table_name,
|
|
445
|
+
Key={"session_id": {"S": session_id}},
|
|
446
|
+
)
|
|
447
|
+
current_item = get_response.get("Item")
|
|
448
|
+
if not current_item:
|
|
449
|
+
return None
|
|
450
|
+
|
|
451
|
+
# Update session_data with the new session_name
|
|
452
|
+
session_data = deserialize_from_dynamodb_item(current_item).get("session_data", {})
|
|
453
|
+
session_data["session_name"] = session_name
|
|
454
|
+
response = self.client.update_item(
|
|
455
|
+
TableName=self.session_table_name,
|
|
456
|
+
Key={"session_id": {"S": session_id}},
|
|
457
|
+
UpdateExpression="SET session_data = :session_data, updated_at = :updated_at",
|
|
458
|
+
ConditionExpression="session_type = :session_type",
|
|
459
|
+
ExpressionAttributeValues={
|
|
460
|
+
":session_data": {"S": json.dumps(session_data)},
|
|
461
|
+
":session_type": {"S": session_type.value},
|
|
462
|
+
":updated_at": {"N": str(int(time.time()))},
|
|
463
|
+
},
|
|
464
|
+
ReturnValues="ALL_NEW",
|
|
465
|
+
)
|
|
466
|
+
item = response.get("Attributes")
|
|
467
|
+
if not item:
|
|
468
|
+
return None
|
|
469
|
+
|
|
470
|
+
session = deserialize_from_dynamodb_item(item)
|
|
471
|
+
if not deserialize:
|
|
472
|
+
return session
|
|
473
|
+
|
|
474
|
+
if session_type == SessionType.AGENT:
|
|
475
|
+
return AgentSession.from_dict(session)
|
|
476
|
+
elif session_type == SessionType.TEAM:
|
|
477
|
+
return TeamSession.from_dict(session)
|
|
478
|
+
else:
|
|
479
|
+
return WorkflowSession.from_dict(session)
|
|
480
|
+
|
|
481
|
+
except Exception as e:
|
|
482
|
+
log_error(f"Failed to rename session {session_id}: {e}")
|
|
483
|
+
raise e
|
|
484
|
+
|
|
485
|
+
def upsert_session(
|
|
486
|
+
self, session: Session, deserialize: Optional[bool] = True
|
|
487
|
+
) -> Optional[Union[Session, Dict[str, Any]]]:
|
|
488
|
+
"""
|
|
489
|
+
Upsert a session into the database.
|
|
490
|
+
|
|
491
|
+
This method provides true upsert behavior: creates a new session if it doesn't exist,
|
|
492
|
+
or updates an existing session while preserving important fields.
|
|
493
|
+
|
|
494
|
+
Args:
|
|
495
|
+
session (Session): The session to upsert.
|
|
496
|
+
deserialize (Optional[bool]): Whether to deserialize the session.
|
|
497
|
+
|
|
498
|
+
Returns:
|
|
499
|
+
Optional[Session]: The upserted session if successful, None otherwise.
|
|
500
|
+
"""
|
|
501
|
+
try:
|
|
502
|
+
table_name = self._get_table("sessions", create_table_if_not_found=True)
|
|
503
|
+
|
|
504
|
+
# Get session if it already exists in the db.
|
|
505
|
+
# We need to do this to handle updating nested fields.
|
|
506
|
+
response = self.client.get_item(TableName=table_name, Key={"session_id": {"S": session.session_id}})
|
|
507
|
+
existing_item = response.get("Item")
|
|
508
|
+
|
|
509
|
+
# Prepare the session to upsert, merging with existing session if it exists.
|
|
510
|
+
serialized_session = prepare_session_data(session)
|
|
511
|
+
if existing_item:
|
|
512
|
+
serialized_session = merge_with_existing_session(serialized_session, existing_item)
|
|
513
|
+
serialized_session["updated_at"] = int(time.time())
|
|
514
|
+
else:
|
|
515
|
+
serialized_session["updated_at"] = serialized_session["created_at"]
|
|
516
|
+
|
|
517
|
+
# Upsert
|
|
518
|
+
item = serialize_to_dynamo_item(serialized_session)
|
|
519
|
+
self.client.put_item(TableName=table_name, Item=item)
|
|
520
|
+
|
|
521
|
+
return deserialize_session_result(serialized_session, session, deserialize)
|
|
522
|
+
|
|
523
|
+
except Exception as e:
|
|
524
|
+
log_error(f"Failed to upsert session {session.session_id}: {e}")
|
|
525
|
+
raise e
|
|
526
|
+
|
|
527
|
+
def upsert_sessions(
|
|
528
|
+
self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
|
|
529
|
+
) -> List[Union[Session, Dict[str, Any]]]:
|
|
530
|
+
"""
|
|
531
|
+
Bulk upsert multiple sessions for improved performance on large datasets.
|
|
532
|
+
|
|
533
|
+
Args:
|
|
534
|
+
sessions (List[Session]): List of sessions to upsert.
|
|
535
|
+
deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
|
|
536
|
+
|
|
537
|
+
Returns:
|
|
538
|
+
List[Union[Session, Dict[str, Any]]]: List of upserted sessions.
|
|
539
|
+
|
|
540
|
+
Raises:
|
|
541
|
+
Exception: If an error occurs during bulk upsert.
|
|
542
|
+
"""
|
|
543
|
+
if not sessions:
|
|
544
|
+
return []
|
|
545
|
+
|
|
546
|
+
try:
|
|
547
|
+
log_info(
|
|
548
|
+
f"DynamoDb doesn't support efficient bulk operations, falling back to individual upserts for {len(sessions)} sessions"
|
|
549
|
+
)
|
|
550
|
+
|
|
551
|
+
# Fall back to individual upserts
|
|
552
|
+
results = []
|
|
553
|
+
for session in sessions:
|
|
554
|
+
if session is not None:
|
|
555
|
+
result = self.upsert_session(session, deserialize=deserialize)
|
|
556
|
+
if result is not None:
|
|
557
|
+
results.append(result)
|
|
558
|
+
return results
|
|
559
|
+
|
|
560
|
+
except Exception as e:
|
|
561
|
+
log_error(f"Exception during bulk session upsert: {e}")
|
|
562
|
+
return []
|
|
563
|
+
|
|
564
|
+
# --- User Memory ---
|
|
565
|
+
|
|
566
|
+
def delete_user_memory(self, memory_id: str, user_id: Optional[str] = None) -> None:
|
|
567
|
+
"""
|
|
568
|
+
Delete a user memory from the database.
|
|
569
|
+
|
|
570
|
+
Args:
|
|
571
|
+
memory_id: The ID of the memory to delete.
|
|
572
|
+
user_id: The ID of the user (optional, for filtering).
|
|
573
|
+
|
|
574
|
+
Raises:
|
|
575
|
+
Exception: If any error occurs while deleting the user memory.
|
|
576
|
+
"""
|
|
577
|
+
try:
|
|
578
|
+
# If user_id is provided, verify the memory belongs to the user before deleting
|
|
579
|
+
if user_id:
|
|
580
|
+
response = self.client.get_item(
|
|
581
|
+
TableName=self.memory_table_name,
|
|
582
|
+
Key={"memory_id": {"S": memory_id}},
|
|
583
|
+
)
|
|
584
|
+
item = response.get("Item")
|
|
585
|
+
if item:
|
|
586
|
+
memory_data = deserialize_from_dynamodb_item(item)
|
|
587
|
+
if memory_data.get("user_id") != user_id:
|
|
588
|
+
log_debug(f"Memory {memory_id} does not belong to user {user_id}")
|
|
589
|
+
return
|
|
590
|
+
|
|
591
|
+
self.client.delete_item(
|
|
592
|
+
TableName=self.memory_table_name,
|
|
593
|
+
Key={"memory_id": {"S": memory_id}},
|
|
594
|
+
)
|
|
595
|
+
log_debug(f"Deleted user memory {memory_id}")
|
|
596
|
+
|
|
597
|
+
except Exception as e:
|
|
598
|
+
log_error(f"Failed to delete user memory {memory_id}: {e}")
|
|
599
|
+
raise e
|
|
600
|
+
|
|
601
|
+
def delete_user_memories(self, memory_ids: List[str], user_id: Optional[str] = None) -> None:
|
|
602
|
+
"""
|
|
603
|
+
Delete user memories from the database in batches.
|
|
604
|
+
|
|
605
|
+
Args:
|
|
606
|
+
memory_ids: List of memory IDs to delete
|
|
607
|
+
user_id: The ID of the user (optional, for filtering).
|
|
608
|
+
|
|
609
|
+
Raises:
|
|
610
|
+
Exception: If any error occurs while deleting the user memories.
|
|
611
|
+
"""
|
|
612
|
+
|
|
613
|
+
try:
|
|
614
|
+
# If user_id is provided, filter memory_ids to only those belonging to the user
|
|
615
|
+
if user_id:
|
|
616
|
+
filtered_memory_ids = []
|
|
617
|
+
for memory_id in memory_ids:
|
|
618
|
+
response = self.client.get_item(
|
|
619
|
+
TableName=self.memory_table_name,
|
|
620
|
+
Key={"memory_id": {"S": memory_id}},
|
|
621
|
+
)
|
|
622
|
+
item = response.get("Item")
|
|
623
|
+
if item:
|
|
624
|
+
memory_data = deserialize_from_dynamodb_item(item)
|
|
625
|
+
if memory_data.get("user_id") == user_id:
|
|
626
|
+
filtered_memory_ids.append(memory_id)
|
|
627
|
+
memory_ids = filtered_memory_ids
|
|
628
|
+
|
|
629
|
+
for i in range(0, len(memory_ids), DYNAMO_BATCH_SIZE_LIMIT):
|
|
630
|
+
batch = memory_ids[i : i + DYNAMO_BATCH_SIZE_LIMIT]
|
|
631
|
+
|
|
632
|
+
delete_requests = []
|
|
633
|
+
for memory_id in batch:
|
|
634
|
+
delete_requests.append({"DeleteRequest": {"Key": {"memory_id": {"S": memory_id}}}})
|
|
635
|
+
|
|
636
|
+
self.client.batch_write_item(RequestItems={self.memory_table_name: delete_requests})
|
|
637
|
+
|
|
638
|
+
except Exception as e:
|
|
639
|
+
log_error(f"Failed to delete user memories: {e}")
|
|
640
|
+
raise e
|
|
641
|
+
|
|
642
|
+
def get_all_memory_topics(self) -> List[str]:
|
|
643
|
+
"""Get all memory topics from the database.
|
|
644
|
+
|
|
645
|
+
Args:
|
|
646
|
+
user_id: The ID of the user (optional, for filtering).
|
|
647
|
+
|
|
648
|
+
Returns:
|
|
649
|
+
List[str]: List of unique memory topics.
|
|
650
|
+
"""
|
|
651
|
+
try:
|
|
652
|
+
table_name = self._get_table("memories")
|
|
653
|
+
if table_name is None:
|
|
654
|
+
return []
|
|
655
|
+
|
|
656
|
+
# Build filter expression for user_id if provided
|
|
657
|
+
scan_kwargs = {"TableName": table_name}
|
|
658
|
+
|
|
659
|
+
# Scan the table to get memories
|
|
660
|
+
response = self.client.scan(**scan_kwargs)
|
|
661
|
+
items = response.get("Items", [])
|
|
662
|
+
|
|
663
|
+
# Handle pagination
|
|
664
|
+
while "LastEvaluatedKey" in response:
|
|
665
|
+
scan_kwargs["ExclusiveStartKey"] = response["LastEvaluatedKey"]
|
|
666
|
+
response = self.client.scan(**scan_kwargs)
|
|
667
|
+
items.extend(response.get("Items", []))
|
|
668
|
+
|
|
669
|
+
# Extract topics from all memories
|
|
670
|
+
all_topics = set()
|
|
671
|
+
for item in items:
|
|
672
|
+
memory_data = deserialize_from_dynamodb_item(item)
|
|
673
|
+
topics = memory_data.get("memory", {}).get("topics", [])
|
|
674
|
+
all_topics.update(topics)
|
|
675
|
+
|
|
676
|
+
return list(all_topics)
|
|
677
|
+
|
|
678
|
+
except Exception as e:
|
|
679
|
+
log_error(f"Exception reading from memory table: {e}")
|
|
680
|
+
raise e
|
|
681
|
+
|
|
682
|
+
def get_user_memory(
|
|
683
|
+
self,
|
|
684
|
+
memory_id: str,
|
|
685
|
+
deserialize: Optional[bool] = True,
|
|
686
|
+
user_id: Optional[str] = None,
|
|
687
|
+
) -> Optional[Union[UserMemory, Dict[str, Any]]]:
|
|
688
|
+
"""
|
|
689
|
+
Get a user memory from the database as a UserMemory object.
|
|
690
|
+
|
|
691
|
+
Args:
|
|
692
|
+
memory_id: The ID of the memory to get.
|
|
693
|
+
deserialize: Whether to deserialize the memory.
|
|
694
|
+
user_id: The ID of the user (optional, for filtering).
|
|
695
|
+
|
|
696
|
+
Returns:
|
|
697
|
+
Optional[UserMemory]: The user memory data if found, None otherwise.
|
|
698
|
+
|
|
699
|
+
Raises:
|
|
700
|
+
Exception: If any error occurs while getting the user memory.
|
|
701
|
+
"""
|
|
702
|
+
try:
|
|
703
|
+
table_name = self._get_table("memories")
|
|
704
|
+
response = self.client.get_item(TableName=table_name, Key={"memory_id": {"S": memory_id}})
|
|
705
|
+
|
|
706
|
+
item = response.get("Item")
|
|
707
|
+
if not item:
|
|
708
|
+
return None
|
|
709
|
+
|
|
710
|
+
item = deserialize_from_dynamodb_item(item)
|
|
711
|
+
|
|
712
|
+
# Filter by user_id if provided
|
|
713
|
+
if user_id and item.get("user_id") != user_id:
|
|
714
|
+
return None
|
|
715
|
+
|
|
716
|
+
if not deserialize:
|
|
717
|
+
return item
|
|
718
|
+
|
|
719
|
+
return UserMemory.from_dict(item)
|
|
720
|
+
|
|
721
|
+
except Exception as e:
|
|
722
|
+
log_error(f"Failed to get user memory {memory_id}: {e}")
|
|
723
|
+
raise e
|
|
724
|
+
|
|
725
|
+
def get_user_memories(
|
|
726
|
+
self,
|
|
727
|
+
user_id: Optional[str] = None,
|
|
728
|
+
agent_id: Optional[str] = None,
|
|
729
|
+
team_id: Optional[str] = None,
|
|
730
|
+
topics: Optional[List[str]] = None,
|
|
731
|
+
search_content: Optional[str] = None,
|
|
732
|
+
limit: Optional[int] = None,
|
|
733
|
+
page: Optional[int] = None,
|
|
734
|
+
sort_by: Optional[str] = None,
|
|
735
|
+
sort_order: Optional[str] = None,
|
|
736
|
+
deserialize: Optional[bool] = True,
|
|
737
|
+
) -> Union[List[UserMemory], Tuple[List[Dict[str, Any]], int]]:
|
|
738
|
+
"""
|
|
739
|
+
Get user memories from the database as a list of UserMemory objects.
|
|
740
|
+
|
|
741
|
+
Args:
|
|
742
|
+
user_id: The ID of the user to get the memories for.
|
|
743
|
+
agent_id: The ID of the agent to get the memories for.
|
|
744
|
+
team_id: The ID of the team to get the memories for.
|
|
745
|
+
workflow_id: The ID of the workflow to get the memories for.
|
|
746
|
+
topics: The topics to filter the memories by.
|
|
747
|
+
search_content: The content to search for in the memories.
|
|
748
|
+
limit: The maximum number of memories to return.
|
|
749
|
+
page: The page number to return.
|
|
750
|
+
sort_by: The field to sort the memories by.
|
|
751
|
+
sort_order: The order to sort the memories by.
|
|
752
|
+
deserialize: Whether to deserialize the memories.
|
|
753
|
+
|
|
754
|
+
Returns:
|
|
755
|
+
Union[List[UserMemory], List[Dict[str, Any]], Tuple[List[Dict[str, Any]], int]]: The user memories data.
|
|
756
|
+
|
|
757
|
+
Raises:
|
|
758
|
+
Exception: If any error occurs while getting the user memories.
|
|
759
|
+
"""
|
|
760
|
+
try:
|
|
761
|
+
table_name = self._get_table("memories")
|
|
762
|
+
if table_name is None:
|
|
763
|
+
return [] if deserialize else ([], 0)
|
|
764
|
+
|
|
765
|
+
# Build filter expressions for component filters
|
|
766
|
+
(
|
|
767
|
+
filter_expression,
|
|
768
|
+
expression_attribute_names,
|
|
769
|
+
expression_attribute_values,
|
|
770
|
+
) = build_query_filter_expression(filters={"agent_id": agent_id, "team_id": team_id})
|
|
771
|
+
|
|
772
|
+
# Build topic filter expression if topics provided
|
|
773
|
+
if topics:
|
|
774
|
+
topic_filter, topic_values = build_topic_filter_expression(topics)
|
|
775
|
+
expression_attribute_values.update(topic_values)
|
|
776
|
+
filter_expression = f"{filter_expression} AND {topic_filter}" if filter_expression else topic_filter
|
|
777
|
+
|
|
778
|
+
# Add search content filter if provided
|
|
779
|
+
if search_content:
|
|
780
|
+
search_filter = "contains(memory, :search_content)"
|
|
781
|
+
expression_attribute_values[":search_content"] = {"S": search_content}
|
|
782
|
+
filter_expression = f"{filter_expression} AND {search_filter}" if filter_expression else search_filter
|
|
783
|
+
|
|
784
|
+
# Determine whether to use GSI query or table scan
|
|
785
|
+
if user_id:
|
|
786
|
+
# Use GSI query when user_id is provided
|
|
787
|
+
key_condition_expression = "#user_id = :user_id"
|
|
788
|
+
|
|
789
|
+
# Set up expression attributes for GSI key condition
|
|
790
|
+
expression_attribute_names["#user_id"] = "user_id"
|
|
791
|
+
expression_attribute_values[":user_id"] = {"S": user_id}
|
|
792
|
+
|
|
793
|
+
# Execute query with pagination
|
|
794
|
+
items = execute_query_with_pagination(
|
|
795
|
+
self.client,
|
|
796
|
+
table_name,
|
|
797
|
+
"user_id-updated_at-index",
|
|
798
|
+
key_condition_expression,
|
|
799
|
+
expression_attribute_names,
|
|
800
|
+
expression_attribute_values,
|
|
801
|
+
filter_expression,
|
|
802
|
+
sort_by,
|
|
803
|
+
sort_order,
|
|
804
|
+
limit,
|
|
805
|
+
page,
|
|
806
|
+
)
|
|
807
|
+
else:
|
|
808
|
+
# Use table scan when user_id is None
|
|
809
|
+
scan_kwargs = {"TableName": table_name}
|
|
810
|
+
|
|
811
|
+
if filter_expression:
|
|
812
|
+
scan_kwargs["FilterExpression"] = filter_expression
|
|
813
|
+
if expression_attribute_names:
|
|
814
|
+
scan_kwargs["ExpressionAttributeNames"] = expression_attribute_names # type: ignore
|
|
815
|
+
if expression_attribute_values:
|
|
816
|
+
scan_kwargs["ExpressionAttributeValues"] = expression_attribute_values # type: ignore
|
|
817
|
+
|
|
818
|
+
# Execute scan
|
|
819
|
+
response = self.client.scan(**scan_kwargs)
|
|
820
|
+
items = response.get("Items", [])
|
|
821
|
+
|
|
822
|
+
# Handle pagination for scan
|
|
823
|
+
while "LastEvaluatedKey" in response:
|
|
824
|
+
scan_kwargs["ExclusiveStartKey"] = response["LastEvaluatedKey"]
|
|
825
|
+
response = self.client.scan(**scan_kwargs)
|
|
826
|
+
items.extend(response.get("Items", []))
|
|
827
|
+
|
|
828
|
+
items = [deserialize_from_dynamodb_item(item) for item in items]
|
|
829
|
+
|
|
830
|
+
if sort_by and sort_by != "updated_at":
|
|
831
|
+
items = apply_sorting(items, sort_by, sort_order)
|
|
832
|
+
|
|
833
|
+
if page:
|
|
834
|
+
paginated_items = apply_pagination(items, limit, page)
|
|
835
|
+
|
|
836
|
+
if not deserialize:
|
|
837
|
+
return paginated_items, len(items)
|
|
838
|
+
|
|
839
|
+
return [UserMemory.from_dict(item) for item in items]
|
|
840
|
+
|
|
841
|
+
except Exception as e:
|
|
842
|
+
log_error(f"Failed to get user memories: {e}")
|
|
843
|
+
raise e
|
|
844
|
+
|
|
845
|
+
    def get_user_memory_stats(
        self,
        limit: Optional[int] = None,
        page: Optional[int] = None,
    ) -> Tuple[List[Dict[str, Any]], int]:
        """Get user memories stats.

        Args:
            limit (Optional[int]): The maximum number of user stats to return.
            page (Optional[int]): The page number.

        Returns:
            Tuple[List[Dict[str, Any]], int]: A list of dictionaries containing user stats and total count.

        Example:
            (
                [
                    {
                        "user_id": "123",
                        "total_memories": 10,
                        "last_memory_updated_at": 1714560000,
                    },
                ],
                total_count: 1,
            )
        """
        try:
            table_name = self._get_table("memories")

            # Scan the full memories table; stats are aggregated client-side
            scan_kwargs = {"TableName": table_name}

            response = self.client.scan(**scan_kwargs)
            items = response.get("Items", [])

            # Handle pagination
            while "LastEvaluatedKey" in response:
                scan_kwargs["ExclusiveStartKey"] = response["LastEvaluatedKey"]
                response = self.client.scan(**scan_kwargs)
                items.extend(response.get("Items", []))

            # Aggregate stats by user_id
            user_stats = {}
            for item in items:
                memory_data = deserialize_from_dynamodb_item(item)
                current_user_id = memory_data.get("user_id")

                if current_user_id:
                    if current_user_id not in user_stats:
                        user_stats[current_user_id] = {
                            "user_id": current_user_id,
                            "total_memories": 0,
                            "last_memory_updated_at": None,
                        }

                    user_stats[current_user_id]["total_memories"] += 1

                    updated_at = memory_data.get("updated_at")
                    if updated_at:
                        updated_at_dt = datetime.fromisoformat(updated_at.replace("Z", "+00:00"))
                        updated_at_timestamp = int(updated_at_dt.timestamp())

                        if updated_at_timestamp and (
                            user_stats[current_user_id]["last_memory_updated_at"] is None
                            or updated_at_timestamp > user_stats[current_user_id]["last_memory_updated_at"]
                        ):
                            user_stats[current_user_id]["last_memory_updated_at"] = updated_at_timestamp

            # Convert to list and apply sorting
            stats_list = list(user_stats.values())
            stats_list.sort(
                key=lambda x: (x["last_memory_updated_at"] if x["last_memory_updated_at"] is not None else 0),
                reverse=True,
            )

            total_count = len(stats_list)

            # Apply pagination
            if limit is not None:
                start_index = 0
                if page is not None and page > 1:
                    start_index = (page - 1) * limit
                stats_list = stats_list[start_index : start_index + limit]

            return stats_list, total_count

        except Exception as e:
            log_error(f"Failed to get user memory stats: {e}")
            raise e

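    # Usage sketch (illustrative): page through per-user memory stats. The stats are
    # aggregated from a full table scan, so cost grows with table size.
    #
    #   stats, total = db.get_user_memory_stats(limit=10, page=1)
    #   for row in stats:
    #       print(row["user_id"], row["total_memories"], row["last_memory_updated_at"])
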
    def upsert_user_memory(
        self, memory: UserMemory, deserialize: Optional[bool] = True
    ) -> Optional[Union[UserMemory, Dict[str, Any]]]:
        """
        Upsert a user memory into the database.

        Args:
            memory (UserMemory): The memory to upsert.
            deserialize (Optional[bool]): Whether to return a UserMemory object. Defaults to True.

        Returns:
            Optional[Union[UserMemory, Dict[str, Any]]]: The upserted memory if successful, None otherwise.
        """
        try:
            table_name = self._get_table("memories", create_table_if_not_found=True)
            memory_dict = memory.to_dict()
            memory_dict["updated_at"] = datetime.now(timezone.utc).isoformat()
            item = serialize_to_dynamo_item(memory_dict)

            self.client.put_item(TableName=table_name, Item=item)

            if not deserialize:
                return memory_dict

            return UserMemory.from_dict(memory_dict)

        except Exception as e:
            log_error(f"Failed to upsert user memory: {e}")
            raise e

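    # Usage sketch (illustrative; UserMemory field names are assumptions): put_item
    # overwrites any existing item with the same key, which is what makes this an
    # upsert rather than an insert.
    #
    #   memory = UserMemory(memory_id="m-1", user_id="user-123", memory="Prefers window seats")
    #   saved = db.upsert_user_memory(memory)                   # -> UserMemory
    #   raw = db.upsert_user_memory(memory, deserialize=False)  # -> dict
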
    def upsert_memories(
        self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
    ) -> List[Union[UserMemory, Dict[str, Any]]]:
        """
        Bulk upsert multiple user memories for improved performance on large datasets.

        Args:
            memories (List[UserMemory]): List of memories to upsert.
            deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.
            preserve_updated_at (bool): Accepted for interface parity; not used by this DynamoDB implementation.

        Returns:
            List[Union[UserMemory, Dict[str, Any]]]: List of upserted memories. On error, the
            exception is logged and an empty list is returned.
        """
        if not memories:
            return []

        try:
            log_info(
                f"DynamoDB doesn't support efficient bulk operations, falling back to individual upserts for {len(memories)} memories"
            )

            # Fall back to individual upserts
            results = []
            for memory in memories:
                if memory is not None:
                    result = self.upsert_user_memory(memory, deserialize=deserialize)
                    if result is not None:
                        results.append(result)
            return results

        except Exception as e:
            log_error(f"Exception during bulk memory upsert: {e}")
            return []

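    # Note (not from the source): DynamoDB does offer BatchWriteItem (up to 25 puts per
    # request), so a batched variant is possible in principle — a hedged sketch:
    #
    #   for i in range(0, len(items), DYNAMO_BATCH_SIZE_LIMIT):
    #       chunk = items[i : i + DYNAMO_BATCH_SIZE_LIMIT]
    #       requests = [{"PutRequest": {"Item": item}} for item in chunk]
    #       self.client.batch_write_item(RequestItems={table_name: requests})
    #
    # BatchWriteItem cannot express conditional writes and may return UnprocessedItems
    # that need retrying, which likely motivates the per-item put_item fallback here.
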
    def clear_memories(self) -> None:
        """Delete all memories from the database.

        Raises:
            Exception: If an error occurs during deletion.
        """
        try:
            table_name = self._get_table("memories")

            # Scan the table to get all items
            response = self.client.scan(TableName=table_name)
            items = response.get("Items", [])

            # Handle pagination for scan
            while "LastEvaluatedKey" in response:
                response = self.client.scan(TableName=table_name, ExclusiveStartKey=response["LastEvaluatedKey"])
                items.extend(response.get("Items", []))

            if not items:
                return

            # Delete items in batches
            for i in range(0, len(items), DYNAMO_BATCH_SIZE_LIMIT):
                batch = items[i : i + DYNAMO_BATCH_SIZE_LIMIT]

                delete_requests = []
                for item in batch:
                    # Extract the memory_id from the item
                    memory_id = item.get("memory_id", {}).get("S")
                    if memory_id:
                        delete_requests.append({"DeleteRequest": {"Key": {"memory_id": {"S": memory_id}}}})

                if delete_requests:
                    self.client.batch_write_item(RequestItems={table_name: delete_requests})

        except Exception as e:
            from agno.utils.log import log_warning

            log_warning(f"Exception deleting all memories: {e}")
            raise e

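    # Note (not from the source): batch_write_item caps each request at 25 items —
    # presumably what DYNAMO_BATCH_SIZE_LIMIT encodes — and can return UnprocessedItems
    # under throttling. A defensive retry sketch around the call above:
    #
    #   resp = self.client.batch_write_item(RequestItems={table_name: delete_requests})
    #   while resp.get("UnprocessedItems"):
    #       resp = self.client.batch_write_item(RequestItems=resp["UnprocessedItems"])
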
    # --- Metrics ---

    def calculate_metrics(self) -> Optional[Any]:
        """Calculate metrics for all dates without complete metrics.

        Returns:
            Optional[Any]: The calculated metrics or None if no metrics table.

        Raises:
            Exception: If an error occurs during metrics calculation.
        """
        if not self.metrics_table_name:
            return None

        try:
            from agno.utils.log import log_info

            # Get starting date for metrics calculation
            starting_date = self._get_metrics_calculation_starting_date()
            if starting_date is None:
                log_info("No session data found. Won't calculate metrics.")
                return None

            # Get dates that need metrics calculation
            dates_to_process = get_dates_to_calculate_metrics_for(starting_date)
            if not dates_to_process:
                log_info("Metrics already calculated for all relevant dates.")
                return None

            # Get timestamp range for session data
            start_timestamp = int(datetime.combine(dates_to_process[0], datetime.min.time()).timestamp())
            end_timestamp = int(
                datetime.combine(dates_to_process[-1] + timedelta(days=1), datetime.min.time()).timestamp()
            )

            # Get all sessions for the date range
            sessions = self._get_all_sessions_for_metrics_calculation(
                start_timestamp=start_timestamp, end_timestamp=end_timestamp
            )

            # Process session data for metrics calculation
            all_sessions_data = fetch_all_sessions_data(
                sessions=sessions,
                dates_to_process=dates_to_process,
                start_timestamp=start_timestamp,
            )

            if not all_sessions_data:
                log_info("No new session data found. Won't calculate metrics.")
                return None

            # Calculate metrics for each date
            results = []
            metrics_records = []
            for date_to_process in dates_to_process:
                date_key = date_to_process.isoformat()
                sessions_for_date = all_sessions_data.get(date_key, {})

                # Skip dates with no sessions
                if not any(len(sessions) > 0 for sessions in sessions_for_date.values()):
                    continue

                metrics_record = calculate_date_metrics(date_to_process, sessions_for_date)
                metrics_records.append(metrics_record)

            # Store metrics in DynamoDB
            if metrics_records:
                results = self._bulk_upsert_metrics(metrics_records)

            log_debug("Updated metrics calculations")

            return results

        except Exception as e:
            log_error(f"Failed to calculate metrics: {e}")
            raise e

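    # The metrics pipeline above, step by step (helper names are the module's own):
    #   1. _get_metrics_calculation_starting_date()       -> first date needing metrics
    #   2. get_dates_to_calculate_metrics_for(start)      -> dates to process
    #   3. _get_all_sessions_for_metrics_calculation(...) -> sessions in the range
    #   4. calculate_date_metrics(date, sessions)         -> one record per date
    #   5. _bulk_upsert_metrics(records)                  -> persisted results
    # A scheduled job could simply call `db.calculate_metrics()` once per day.
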
    def _get_metrics_calculation_starting_date(self) -> Optional[date]:
        """Get the first date for which metrics calculation is needed:
        1. If there are metrics records, return the date of the first day without a complete metrics record.
        2. If there are no metrics records, return the date of the first recorded session.
        3. If there are no metrics records and no session records, return None.

        Returns:
            Optional[date]: The starting date for which metrics calculation is needed.
        """
        try:
            metrics_table_name = self._get_table("metrics")

            # 1. Check for existing metrics records
            response = self.client.scan(
                TableName=metrics_table_name,
                ProjectionExpression="#date, completed",
                ExpressionAttributeNames={"#date": "date"},
                Limit=1000,  # Get a reasonable number of records to find incomplete ones
            )

            metrics_items = response.get("Items", [])

            # Handle pagination to get all metrics records
            while "LastEvaluatedKey" in response:
                response = self.client.scan(
                    TableName=metrics_table_name,
                    ProjectionExpression="#date, completed",
                    ExpressionAttributeNames={"#date": "date"},
                    ExclusiveStartKey=response["LastEvaluatedKey"],
                    Limit=1000,
                )
                metrics_items.extend(response.get("Items", []))

            if metrics_items:
                # Find the latest date with metrics
                latest_complete_date = None
                incomplete_dates = []

                for item in metrics_items:
                    metrics_data = deserialize_from_dynamodb_item(item)
                    record_date = datetime.fromisoformat(metrics_data["date"]).date()
                    is_completed = metrics_data.get("completed", False)

                    if is_completed:
                        if latest_complete_date is None or record_date > latest_complete_date:
                            latest_complete_date = record_date
                    else:
                        incomplete_dates.append(record_date)

                # Return the earliest incomplete date, or the day after the latest complete date
                if incomplete_dates:
                    return min(incomplete_dates)
                elif latest_complete_date:
                    return latest_complete_date + timedelta(days=1)

            # 2. No metrics records. Return the date of the first recorded session.
            sessions_table_name = self._get_table("sessions")

            earliest_session_date = None
            for session_type in ["agent", "team", "workflow"]:
                response = self.client.query(
                    TableName=sessions_table_name,
                    IndexName="session_type-created_at-index",
                    KeyConditionExpression="session_type = :session_type",
                    ExpressionAttributeValues={":session_type": {"S": session_type}},
                    ScanIndexForward=True,  # Ascending order to get earliest
                    Limit=1,
                )

                items = response.get("Items", [])
                if items:
                    first_session = deserialize_from_dynamodb_item(items[0])
                    first_session_timestamp = first_session.get("created_at")

                    if first_session_timestamp:
                        session_date = datetime.fromtimestamp(first_session_timestamp, tz=timezone.utc).date()
                        if earliest_session_date is None or session_date < earliest_session_date:
                            earliest_session_date = session_date

            # 3. Return the earliest session date or None if no sessions exist
            return earliest_session_date

        except Exception as e:
            log_error(f"Failed to get metrics calculation starting date: {e}")
            raise e

    def _get_all_sessions_for_metrics_calculation(
        self, start_timestamp: int, end_timestamp: int
    ) -> List[Dict[str, Any]]:
        """Get all sessions within a timestamp range for metrics calculation.

        Args:
            start_timestamp: Start timestamp (inclusive)
            end_timestamp: End timestamp (also inclusive: DynamoDB's BETWEEN includes both bounds)

        Returns:
            List[Dict[str, Any]]: List of session data dictionaries
        """
        try:
            table_name = self._get_table("sessions")
            all_sessions = []

            # Query sessions by different types within the time range
            for session_type in ["agent", "team", "workflow"]:
                response = self.client.query(
                    TableName=table_name,
                    IndexName="session_type-created_at-index",
                    KeyConditionExpression="session_type = :session_type AND created_at BETWEEN :start_ts AND :end_ts",
                    ExpressionAttributeValues={
                        ":session_type": {"S": session_type},
                        ":start_ts": {"N": str(start_timestamp)},
                        ":end_ts": {"N": str(end_timestamp)},
                    },
                )

                items = response.get("Items", [])

                # Handle pagination
                while "LastEvaluatedKey" in response:
                    response = self.client.query(
                        TableName=table_name,
                        IndexName="session_type-created_at-index",
                        KeyConditionExpression="session_type = :session_type AND created_at BETWEEN :start_ts AND :end_ts",
                        ExpressionAttributeValues={
                            ":session_type": {"S": session_type},
                            ":start_ts": {"N": str(start_timestamp)},
                            ":end_ts": {"N": str(end_timestamp)},
                        },
                        ExclusiveStartKey=response["LastEvaluatedKey"],
                    )
                    items.extend(response.get("Items", []))

                # Deserialize sessions
                for item in items:
                    session_data = deserialize_from_dynamodb_item(item)
                    if session_data:
                        all_sessions.append(session_data)

            return all_sessions

        except Exception as e:
            log_error(f"Failed to get sessions for metrics calculation: {e}")
            raise e

    def _bulk_upsert_metrics(self, metrics_records: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
        """Bulk upsert metrics records into DynamoDB with proper deduplication.

        Args:
            metrics_records: List of metrics records to upsert

        Returns:
            List[Dict[str, Any]]: List of upserted records
        """
        try:
            table_name = self._get_table("metrics")
            if table_name is None:
                return []

            results = []

            # Process each record individually to handle proper upsert
            for record in metrics_records:
                upserted_record = self._upsert_single_metrics_record(table_name, record)
                if upserted_record:
                    results.append(upserted_record)

            return results

        except Exception as e:
            log_error(f"Failed to bulk upsert metrics: {e}")
            raise e

    def _upsert_single_metrics_record(self, table_name: str, record: Dict[str, Any]) -> Optional[Dict[str, Any]]:
        """Upsert a single metrics record, checking for existing records with the same date.

        Args:
            table_name: The DynamoDB table name
            record: The metrics record to upsert

        Returns:
            Optional[Dict[str, Any]]: The upserted record or None if failed
        """
        try:
            date_str = record.get("date")
            if not date_str:
                log_error("Metrics record missing date field")
                return None

            # Convert date object to string if needed
            if hasattr(date_str, "isoformat"):
                date_str = date_str.isoformat()

            # Check if a record already exists for this date
            existing_record = self._get_existing_metrics_record(table_name, date_str)

            if existing_record:
                return self._update_existing_metrics_record(table_name, existing_record, record)
            else:
                return self._create_new_metrics_record(table_name, record)

        except Exception as e:
            log_error(f"Failed to upsert single metrics record: {e}")
            raise e

    def _get_existing_metrics_record(self, table_name: str, date_str: str) -> Optional[Dict[str, Any]]:
        """Get existing metrics record for a given date.

        Args:
            table_name: The DynamoDB table name
            date_str: The date string to search for

        Returns:
            Optional[Dict[str, Any]]: The existing record or None if not found
        """
        try:
            # Query using the date-aggregation_period-index
            response = self.client.query(
                TableName=table_name,
                IndexName="date-aggregation_period-index",
                KeyConditionExpression="#date = :date AND aggregation_period = :period",
                ExpressionAttributeNames={"#date": "date"},
                ExpressionAttributeValues={
                    ":date": {"S": date_str},
                    ":period": {"S": "daily"},
                },
                Limit=1,
            )

            items = response.get("Items", [])
            if items:
                return deserialize_from_dynamodb_item(items[0])
            return None

        except Exception as e:
            log_error(f"Failed to get existing metrics record for date {date_str}: {e}")
            raise e

    def _update_existing_metrics_record(
        self,
        table_name: str,
        existing_record: Dict[str, Any],
        new_record: Dict[str, Any],
    ) -> Optional[Dict[str, Any]]:
        """Update an existing metrics record.

        Args:
            table_name: The DynamoDB table name
            existing_record: The existing record
            new_record: The new record data

        Returns:
            Optional[Dict[str, Any]]: The updated record or None if failed
        """
        try:
            # Use the existing record's ID
            new_record["id"] = existing_record["id"]
            new_record["updated_at"] = int(time.time())

            # Prepare and serialize the record
            prepared_record = self._prepare_metrics_record_for_dynamo(new_record)
            item = self._serialize_metrics_to_dynamo_item(prepared_record)

            # Update the record
            self.client.put_item(TableName=table_name, Item=item)

            return new_record

        except Exception as e:
            log_error(f"Failed to update existing metrics record: {e}")
            raise e

    def _create_new_metrics_record(self, table_name: str, record: Dict[str, Any]) -> Optional[Dict[str, Any]]:
        """Create a new metrics record.

        Args:
            table_name: The DynamoDB table name
            record: The record to create

        Returns:
            Optional[Dict[str, Any]]: The created record or None if failed
        """
        try:
            # Prepare and serialize the record
            prepared_record = self._prepare_metrics_record_for_dynamo(record)
            item = self._serialize_metrics_to_dynamo_item(prepared_record)

            # Create the record
            self.client.put_item(TableName=table_name, Item=item)

            return record

        except Exception as e:
            log_error(f"Failed to create new metrics record: {e}")
            raise e

    def _prepare_metrics_record_for_dynamo(self, record: Dict[str, Any]) -> Dict[str, Any]:
        """Prepare a metrics record for DynamoDB serialization by converting all data types properly.

        Args:
            record: The metrics record to prepare

        Returns:
            Dict[str, Any]: The prepared record ready for DynamoDB serialization
        """

        def convert_value(value):
            """Recursively convert values to DynamoDB-compatible types."""
            if value is None:
                return None
            elif isinstance(value, bool):
                return value
            elif isinstance(value, (int, float)):
                return value
            elif isinstance(value, str):
                return value
            elif hasattr(value, "isoformat"):  # date/datetime objects
                return value.isoformat()
            elif isinstance(value, dict):
                return {k: convert_value(v) for k, v in value.items()}
            elif isinstance(value, list):
                return [convert_value(item) for item in value]
            else:
                return str(value)

        return {key: convert_value(value) for key, value in record.items()}

    def _serialize_metrics_to_dynamo_item(self, data: Dict[str, Any]) -> Dict[str, Any]:
        """Serialize metrics data to DynamoDB item format with proper boolean handling.

        Args:
            data: The metrics data to serialize

        Returns:
            Dict[str, Any]: DynamoDB-ready item
        """
        import json

        item: Dict[str, Any] = {}
        for key, value in data.items():
            if value is not None:
                if isinstance(value, bool):
                    item[key] = {"BOOL": value}
                elif isinstance(value, (int, float)):
                    item[key] = {"N": str(value)}
                elif isinstance(value, str):
                    item[key] = {"S": value}
                elif isinstance(value, (dict, list)):
                    item[key] = {"S": json.dumps(value)}
                else:
                    item[key] = {"S": str(value)}
        return item

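    # Worked example (values illustrative): booleans are checked before numbers because
    # bool is a subclass of int in Python, so the branch order above matters.
    #
    #   record = {"date": date(2024, 5, 1), "completed": True, "runs": 12, "extra": None}
    #   prepared = self._prepare_metrics_record_for_dynamo(record)
    #   # -> {"date": "2024-05-01", "completed": True, "runs": 12, "extra": None}
    #   item = self._serialize_metrics_to_dynamo_item(prepared)
    #   # -> {"date": {"S": "2024-05-01"}, "completed": {"BOOL": True}, "runs": {"N": "12"}}
    #   # "extra" is dropped: None values are skipped during serialization.
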
    def get_metrics(
        self,
        starting_date: Optional[date] = None,
        ending_date: Optional[date] = None,
    ) -> Tuple[List[Any], Optional[int]]:
        """
        Get metrics from the database.

        Args:
            starting_date: The starting date to filter metrics by.
            ending_date: The ending date to filter metrics by.

        Returns:
            Tuple[List[Any], Optional[int]]: A tuple containing the metrics data and the total count.

        Raises:
            Exception: If any error occurs while getting the metrics.
        """
        try:
            table_name = self._get_table("metrics")
            if table_name is None:
                return ([], None)

            # Build query parameters
            scan_kwargs: Dict[str, Any] = {"TableName": table_name}

            if starting_date or ending_date:
                filter_expressions = []
                expression_values = {}

                if starting_date:
                    filter_expressions.append("#date >= :start_date")
                    expression_values[":start_date"] = {"S": starting_date.isoformat()}

                if ending_date:
                    filter_expressions.append("#date <= :end_date")
                    expression_values[":end_date"] = {"S": ending_date.isoformat()}

                scan_kwargs["FilterExpression"] = " AND ".join(filter_expressions)
                scan_kwargs["ExpressionAttributeNames"] = {"#date": "date"}
                scan_kwargs["ExpressionAttributeValues"] = expression_values

            # Execute scan
            response = self.client.scan(**scan_kwargs)
            items = response.get("Items", [])

            # Handle pagination
            while "LastEvaluatedKey" in response:
                scan_kwargs["ExclusiveStartKey"] = response["LastEvaluatedKey"]
                response = self.client.scan(**scan_kwargs)
                items.extend(response.get("Items", []))

            # Convert to metrics data
            metrics_data = []
            for item in items:
                metric_data = deserialize_from_dynamodb_item(item)
                if metric_data:
                    metrics_data.append(metric_data)

            return metrics_data, len(metrics_data)

        except Exception as e:
            log_error(f"Failed to get metrics: {e}")
            raise e

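    # Usage sketch (dates illustrative): ISO date strings compare lexicographically,
    # which is why the string range filter in get_metrics is correct.
    #
    #   rows, count = db.get_metrics(starting_date=date(2024, 5, 1), ending_date=date(2024, 5, 31))
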
    # --- Knowledge methods ---

    def delete_knowledge_content(self, id: str):
        """Delete a knowledge row from the database.

        Args:
            id (str): The ID of the knowledge row to delete.

        Raises:
            Exception: If an error occurs during deletion.
        """
        try:
            table_name = self._get_table("knowledge")

            self.client.delete_item(TableName=table_name, Key={"id": {"S": id}})

            log_debug(f"Deleted knowledge content {id}")

        except Exception as e:
            log_error(f"Failed to delete knowledge content {id}: {e}")
            raise e

    def get_knowledge_content(self, id: str) -> Optional[KnowledgeRow]:
        """Get a knowledge row from the database.

        Args:
            id (str): The ID of the knowledge row to get.

        Returns:
            Optional[KnowledgeRow]: The knowledge row, or None if it doesn't exist.
        """
        try:
            table_name = self._get_table("knowledge")
            response = self.client.get_item(TableName=table_name, Key={"id": {"S": id}})

            item = response.get("Item")
            if item:
                return deserialize_knowledge_row(item)

            return None

        except Exception as e:
            log_error(f"Failed to get knowledge content {id}: {e}")
            raise e

    def get_knowledge_contents(
        self,
        limit: Optional[int] = None,
        page: Optional[int] = None,
        sort_by: Optional[str] = None,
        sort_order: Optional[str] = None,
    ) -> Tuple[List[KnowledgeRow], int]:
        """Get all knowledge contents from the database.

        Args:
            limit (Optional[int]): The maximum number of knowledge contents to return.
            page (Optional[int]): The page number.
            sort_by (Optional[str]): The column to sort by.
            sort_order (Optional[str]): The order to sort by.

        Returns:
            Tuple[List[KnowledgeRow], int]: The knowledge contents and total count.

        Raises:
            Exception: If an error occurs during retrieval.
        """
        try:
            table_name = self._get_table("knowledge")
            if table_name is None:
                return [], 0

            response = self.client.scan(TableName=table_name)
            items = response.get("Items", [])

            # Handle pagination
            while "LastEvaluatedKey" in response:
                response = self.client.scan(
                    TableName=table_name,
                    ExclusiveStartKey=response["LastEvaluatedKey"],
                )
                items.extend(response.get("Items", []))

            # Convert to knowledge rows
            knowledge_rows = []
            for item in items:
                try:
                    knowledge_row = deserialize_knowledge_row(item)
                    knowledge_rows.append(knowledge_row)
                except Exception as e:
                    log_error(f"Failed to deserialize knowledge row: {e}")

            # Apply sorting
            if sort_by:
                reverse = sort_order == "desc"
                knowledge_rows = sorted(
                    knowledge_rows,
                    key=lambda x: getattr(x, sort_by, ""),
                    reverse=reverse,
                )

            # Get total count before pagination
            total_count = len(knowledge_rows)

            # Apply pagination
            if limit:
                start_index = 0
                if page and page > 1:
                    start_index = (page - 1) * limit
                knowledge_rows = knowledge_rows[start_index : start_index + limit]

            return knowledge_rows, total_count

        except Exception as e:
            log_error(f"Failed to get knowledge contents: {e}")
            raise e

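    # Usage sketch (illustrative; the sort key is an assumption about KnowledgeRow's
    # fields): list the second page of knowledge rows, newest first.
    #
    #   rows, total = db.get_knowledge_contents(limit=50, page=2, sort_by="updated_at", sort_order="desc")
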
    def upsert_knowledge_content(self, knowledge_row: KnowledgeRow):
        """Upsert knowledge content in the database.

        Args:
            knowledge_row (KnowledgeRow): The knowledge row to upsert.

        Returns:
            Optional[KnowledgeRow]: The upserted knowledge row, or None if the operation fails.
        """
        try:
            table_name = self._get_table("knowledge", create_table_if_not_found=True)
            item = serialize_knowledge_row(knowledge_row)

            self.client.put_item(TableName=table_name, Item=item)

            return knowledge_row

        except Exception as e:
            log_error(f"Failed to upsert knowledge content {knowledge_row.id}: {e}")
            raise e

    # --- Eval ---

    def create_eval_run(self, eval_run: EvalRunRecord) -> Optional[EvalRunRecord]:
        """Create an eval run in the database.

        Args:
            eval_run (EvalRunRecord): The eval run to create.

        Returns:
            Optional[EvalRunRecord]: The created eval run, or None if the operation fails.

        Raises:
            Exception: If an error occurs during creation.
        """
        try:
            table_name = self._get_table("evals", create_table_if_not_found=True)

            item = serialize_eval_record(eval_run)
            current_time = int(datetime.now(timezone.utc).timestamp())
            item["created_at"] = {"N": str(current_time)}
            item["updated_at"] = {"N": str(current_time)}

            self.client.put_item(TableName=table_name, Item=item)

            return eval_run

        except Exception as e:
            log_error(f"Failed to create eval run: {e}")
            raise e

    def delete_eval_runs(self, eval_run_ids: List[str]) -> None:
        """Delete the given eval runs, batching deletes in groups of DYNAMO_BATCH_SIZE_LIMIT."""
        if not eval_run_ids or not self.eval_table_name:
            return

        try:
            for i in range(0, len(eval_run_ids), DYNAMO_BATCH_SIZE_LIMIT):
                batch = eval_run_ids[i : i + DYNAMO_BATCH_SIZE_LIMIT]

                delete_requests = []
                for eval_run_id in batch:
                    delete_requests.append({"DeleteRequest": {"Key": {"run_id": {"S": eval_run_id}}}})

                self.client.batch_write_item(RequestItems={self.eval_table_name: delete_requests})

        except Exception as e:
            log_error(f"Failed to delete eval runs: {e}")
            raise e

    def get_eval_run_raw(self, eval_run_id: str, table: Optional[Any] = None) -> Optional[Dict[str, Any]]:
        """Get an eval run as a raw dict, or None if it doesn't exist."""
        if not self.eval_table_name:
            return None

        try:
            response = self.client.get_item(TableName=self.eval_table_name, Key={"run_id": {"S": eval_run_id}})

            item = response.get("Item")
            if item:
                return deserialize_from_dynamodb_item(item)
            return None

        except Exception as e:
            log_error(f"Failed to get eval run {eval_run_id}: {e}")
            raise e

    def get_eval_run(self, eval_run_id: str, table: Optional[Any] = None) -> Optional[EvalRunRecord]:
        """Get an eval run as an EvalRunRecord, or None if it doesn't exist."""
        if not self.eval_table_name:
            return None

        try:
            response = self.client.get_item(TableName=self.eval_table_name, Key={"run_id": {"S": eval_run_id}})

            item = response.get("Item")
            if item:
                return deserialize_eval_record(item)
            return None

        except Exception as e:
            log_error(f"Failed to get eval run {eval_run_id}: {e}")
            raise e

    def get_eval_runs(
        self,
        limit: Optional[int] = None,
        page: Optional[int] = None,
        sort_by: Optional[str] = None,
        sort_order: Optional[str] = None,
        agent_id: Optional[str] = None,
        team_id: Optional[str] = None,
        workflow_id: Optional[str] = None,
        model_id: Optional[str] = None,
        filter_type: Optional[EvalFilterType] = None,
        eval_type: Optional[List[EvalType]] = None,
        deserialize: Optional[bool] = True,
    ) -> Union[List[EvalRunRecord], Tuple[List[Dict[str, Any]], int]]:
        """Get eval runs matching the given filters, sorted and paginated client-side."""
        try:
            table_name = self._get_table("evals")
            if table_name is None:
                return [] if deserialize else ([], 0)

            scan_kwargs = {"TableName": table_name}

            filter_expressions = []
            expression_values = {}

            if agent_id:
                filter_expressions.append("agent_id = :agent_id")
                expression_values[":agent_id"] = {"S": agent_id}

            if team_id:
                filter_expressions.append("team_id = :team_id")
                expression_values[":team_id"] = {"S": team_id}

            if workflow_id:
                filter_expressions.append("workflow_id = :workflow_id")
                expression_values[":workflow_id"] = {"S": workflow_id}

            if model_id:
                filter_expressions.append("model_id = :model_id")
                expression_values[":model_id"] = {"S": model_id}

            if eval_type is not None and len(eval_type) > 0:
                eval_type_conditions = []
                for i, et in enumerate(eval_type):
                    param_name = f":eval_type_{i}"
                    eval_type_conditions.append(f"eval_type = {param_name}")
                    expression_values[param_name] = {"S": str(et.value)}
                filter_expressions.append(f"({' OR '.join(eval_type_conditions)})")

            if filter_type is not None:
                if filter_type == EvalFilterType.AGENT:
                    filter_expressions.append("attribute_exists(agent_id)")
                elif filter_type == EvalFilterType.TEAM:
                    filter_expressions.append("attribute_exists(team_id)")
                elif filter_type == EvalFilterType.WORKFLOW:
                    filter_expressions.append("attribute_exists(workflow_id)")

            if filter_expressions:
                scan_kwargs["FilterExpression"] = " AND ".join(filter_expressions)

            if expression_values:
                scan_kwargs["ExpressionAttributeValues"] = expression_values  # type: ignore

            # Execute scan
            response = self.client.scan(**scan_kwargs)
            items = response.get("Items", [])

            # Handle pagination
            while "LastEvaluatedKey" in response:
                scan_kwargs["ExclusiveStartKey"] = response["LastEvaluatedKey"]
                response = self.client.scan(**scan_kwargs)
                items.extend(response.get("Items", []))

            # Convert to eval data
            eval_data = []
            for item in items:
                eval_item = deserialize_from_dynamodb_item(item)
                if eval_item:
                    eval_data.append(eval_item)

            # Apply sorting
            eval_data = apply_sorting(eval_data, sort_by, sort_order)

            # Get total count before pagination
            total_count = len(eval_data)

            # Apply pagination
            eval_data = apply_pagination(eval_data, limit, page)

            if not deserialize:
                return eval_data, total_count

            eval_runs = []
            for eval_item in eval_data:
                eval_run = EvalRunRecord.model_validate(eval_item)
                eval_runs.append(eval_run)
            return eval_runs

        except Exception as e:
            log_error(f"Failed to get eval runs: {e}")
            raise e

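    # Usage sketch (illustrative; EvalType.ACCURACY is an assumed enum member): fetch
    # the ten most recent accuracy evals for one agent.
    #
    #   runs = db.get_eval_runs(
    #       agent_id="agent-1",
    #       eval_type=[EvalType.ACCURACY],
    #       sort_by="created_at",
    #       sort_order="desc",
    #       limit=10,
    #   )
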
    def rename_eval_run(
        self, eval_run_id: str, name: str, deserialize: Optional[bool] = True
    ) -> Optional[Union[EvalRunRecord, Dict[str, Any]]]:
        """Rename an eval run and bump its updated_at timestamp."""
        if not self.eval_table_name:
            return None

        try:
            response = self.client.update_item(
                TableName=self.eval_table_name,
                Key={"run_id": {"S": eval_run_id}},
                UpdateExpression="SET #name = :name, updated_at = :updated_at",
                ExpressionAttributeNames={"#name": "name"},
                ExpressionAttributeValues={
                    ":name": {"S": name},
                    ":updated_at": {"N": str(int(time.time()))},
                },
                ReturnValues="ALL_NEW",
            )

            item = response.get("Attributes")
            if item is None:
                return None

            log_debug(f"Renamed eval run with id '{eval_run_id}' to '{name}'")

            item = deserialize_from_dynamodb_item(item)
            return EvalRunRecord.model_validate(item) if deserialize else item

        except Exception as e:
            log_error(f"Failed to rename eval run {eval_run_id}: {e}")
            raise e

    # -- Culture methods --

    def clear_cultural_knowledge(self) -> None:
        """Delete all cultural knowledge from the database."""
        try:
            table_name = self._get_table("culture")
            response = self.client.scan(TableName=table_name, ProjectionExpression="id")

            # The low-level client has no batch_writer (that is the resource-level Table
            # API), so issue explicit BatchWriteItem requests instead.
            items = response.get("Items", [])
            for i in range(0, len(items), DYNAMO_BATCH_SIZE_LIMIT):
                batch = items[i : i + DYNAMO_BATCH_SIZE_LIMIT]
                delete_requests = [{"DeleteRequest": {"Key": {"id": item["id"]}}} for item in batch]
                if delete_requests:
                    self.client.batch_write_item(RequestItems={table_name: delete_requests})
        except Exception as e:
            log_error(f"Failed to clear cultural knowledge: {e}")
            raise e

    def delete_cultural_knowledge(self, id: str) -> None:
        """Delete a cultural knowledge entry from the database."""
        try:
            table_name = self._get_table("culture")
            self.client.delete_item(TableName=table_name, Key={"id": {"S": id}})
        except Exception as e:
            log_error(f"Failed to delete cultural knowledge {id}: {e}")
            raise e

    def get_cultural_knowledge(
        self, id: str, deserialize: Optional[bool] = True
    ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
        """Get a cultural knowledge entry from the database."""
        try:
            table_name = self._get_table("culture")
            response = self.client.get_item(TableName=table_name, Key={"id": {"S": id}})

            item = response.get("Item")
            if not item:
                return None

            db_row = deserialize_from_dynamodb_item(item)
            if not deserialize:
                return db_row

            return deserialize_cultural_knowledge_from_db(db_row)
        except Exception as e:
            log_error(f"Failed to get cultural knowledge {id}: {e}")
            raise e

    def get_all_cultural_knowledge(
        self,
        name: Optional[str] = None,
        agent_id: Optional[str] = None,
        team_id: Optional[str] = None,
        limit: Optional[int] = None,
        page: Optional[int] = None,
        sort_by: Optional[str] = None,
        sort_order: Optional[str] = None,
        deserialize: Optional[bool] = True,
    ) -> Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
        """Get all cultural knowledge from the database."""
        try:
            table_name = self._get_table("culture")

            # Build filter expression
            filter_expressions = []
            expression_values = {}

            if name:
                filter_expressions.append("#name = :name")
                expression_values[":name"] = {"S": name}
            if agent_id:
                filter_expressions.append("agent_id = :agent_id")
                expression_values[":agent_id"] = {"S": agent_id}
            if team_id:
                filter_expressions.append("team_id = :team_id")
                expression_values[":team_id"] = {"S": team_id}

            scan_kwargs: Dict[str, Any] = {"TableName": table_name}
            if filter_expressions:
                scan_kwargs["FilterExpression"] = " AND ".join(filter_expressions)
                scan_kwargs["ExpressionAttributeValues"] = expression_values
                if name:
                    scan_kwargs["ExpressionAttributeNames"] = {"#name": "name"}

            # Execute scan
            response = self.client.scan(**scan_kwargs)
            items = response.get("Items", [])

            # Continue scanning if there's more data
            while "LastEvaluatedKey" in response:
                scan_kwargs["ExclusiveStartKey"] = response["LastEvaluatedKey"]
                response = self.client.scan(**scan_kwargs)
                items.extend(response.get("Items", []))

            # Deserialize items from DynamoDB format
            db_rows = [deserialize_from_dynamodb_item(item) for item in items]

            # Apply sorting
            if sort_by:
                reverse = sort_order == "desc" if sort_order else False
                db_rows.sort(key=lambda x: x.get(sort_by, ""), reverse=reverse)

            # Apply pagination
            total_count = len(db_rows)
            if limit and page:
                start = (page - 1) * limit
                db_rows = db_rows[start : start + limit]
            elif limit:
                db_rows = db_rows[:limit]

            if not deserialize:
                return db_rows, total_count

            return [deserialize_cultural_knowledge_from_db(row) for row in db_rows]
        except Exception as e:
            log_error(f"Failed to get all cultural knowledge: {e}")
            raise e

    def upsert_cultural_knowledge(
        self, cultural_knowledge: CulturalKnowledge, deserialize: Optional[bool] = True
    ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
        """Upsert a cultural knowledge entry into the database."""
        try:
            from uuid import uuid4

            table_name = self._get_table("culture", create_table_if_not_found=True)

            if not cultural_knowledge.id:
                cultural_knowledge.id = str(uuid4())

            # Serialize content, categories, and notes into a dict for DB storage
            content_dict = serialize_cultural_knowledge_for_db(cultural_knowledge)

            # Create the item dict with serialized content
            item_dict = {
                "id": cultural_knowledge.id,
                "name": cultural_knowledge.name,
                "summary": cultural_knowledge.summary,
                "content": content_dict if content_dict else None,
                "metadata": cultural_knowledge.metadata,
                "input": cultural_knowledge.input,
                "created_at": cultural_knowledge.created_at,
                "updated_at": int(time.time()),
                "agent_id": cultural_knowledge.agent_id,
                "team_id": cultural_knowledge.team_id,
            }

            # Convert to DynamoDB format
            item = serialize_to_dynamo_item(item_dict)
            self.client.put_item(TableName=table_name, Item=item)

            return self.get_cultural_knowledge(cultural_knowledge.id, deserialize=deserialize)

        except Exception as e:
            log_error(f"Failed to upsert cultural knowledge: {e}")
            raise e