agno 2.2.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/__init__.py +8 -0
- agno/agent/__init__.py +51 -0
- agno/agent/agent.py +10405 -0
- agno/api/__init__.py +0 -0
- agno/api/agent.py +28 -0
- agno/api/api.py +40 -0
- agno/api/evals.py +22 -0
- agno/api/os.py +17 -0
- agno/api/routes.py +13 -0
- agno/api/schemas/__init__.py +9 -0
- agno/api/schemas/agent.py +16 -0
- agno/api/schemas/evals.py +16 -0
- agno/api/schemas/os.py +14 -0
- agno/api/schemas/response.py +6 -0
- agno/api/schemas/team.py +16 -0
- agno/api/schemas/utils.py +21 -0
- agno/api/schemas/workflows.py +16 -0
- agno/api/settings.py +53 -0
- agno/api/team.py +30 -0
- agno/api/workflow.py +28 -0
- agno/cloud/aws/base.py +214 -0
- agno/cloud/aws/s3/__init__.py +2 -0
- agno/cloud/aws/s3/api_client.py +43 -0
- agno/cloud/aws/s3/bucket.py +195 -0
- agno/cloud/aws/s3/object.py +57 -0
- agno/culture/__init__.py +3 -0
- agno/culture/manager.py +956 -0
- agno/db/__init__.py +24 -0
- agno/db/async_postgres/__init__.py +3 -0
- agno/db/base.py +598 -0
- agno/db/dynamo/__init__.py +3 -0
- agno/db/dynamo/dynamo.py +2042 -0
- agno/db/dynamo/schemas.py +314 -0
- agno/db/dynamo/utils.py +743 -0
- agno/db/firestore/__init__.py +3 -0
- agno/db/firestore/firestore.py +1795 -0
- agno/db/firestore/schemas.py +140 -0
- agno/db/firestore/utils.py +376 -0
- agno/db/gcs_json/__init__.py +3 -0
- agno/db/gcs_json/gcs_json_db.py +1335 -0
- agno/db/gcs_json/utils.py +228 -0
- agno/db/in_memory/__init__.py +3 -0
- agno/db/in_memory/in_memory_db.py +1160 -0
- agno/db/in_memory/utils.py +230 -0
- agno/db/json/__init__.py +3 -0
- agno/db/json/json_db.py +1328 -0
- agno/db/json/utils.py +230 -0
- agno/db/migrations/__init__.py +0 -0
- agno/db/migrations/v1_to_v2.py +635 -0
- agno/db/mongo/__init__.py +17 -0
- agno/db/mongo/async_mongo.py +2026 -0
- agno/db/mongo/mongo.py +1982 -0
- agno/db/mongo/schemas.py +87 -0
- agno/db/mongo/utils.py +259 -0
- agno/db/mysql/__init__.py +3 -0
- agno/db/mysql/mysql.py +2308 -0
- agno/db/mysql/schemas.py +138 -0
- agno/db/mysql/utils.py +355 -0
- agno/db/postgres/__init__.py +4 -0
- agno/db/postgres/async_postgres.py +1927 -0
- agno/db/postgres/postgres.py +2260 -0
- agno/db/postgres/schemas.py +139 -0
- agno/db/postgres/utils.py +442 -0
- agno/db/redis/__init__.py +3 -0
- agno/db/redis/redis.py +1660 -0
- agno/db/redis/schemas.py +123 -0
- agno/db/redis/utils.py +346 -0
- agno/db/schemas/__init__.py +4 -0
- agno/db/schemas/culture.py +120 -0
- agno/db/schemas/evals.py +33 -0
- agno/db/schemas/knowledge.py +40 -0
- agno/db/schemas/memory.py +46 -0
- agno/db/schemas/metrics.py +0 -0
- agno/db/singlestore/__init__.py +3 -0
- agno/db/singlestore/schemas.py +130 -0
- agno/db/singlestore/singlestore.py +2272 -0
- agno/db/singlestore/utils.py +384 -0
- agno/db/sqlite/__init__.py +4 -0
- agno/db/sqlite/async_sqlite.py +2293 -0
- agno/db/sqlite/schemas.py +133 -0
- agno/db/sqlite/sqlite.py +2288 -0
- agno/db/sqlite/utils.py +431 -0
- agno/db/surrealdb/__init__.py +3 -0
- agno/db/surrealdb/metrics.py +292 -0
- agno/db/surrealdb/models.py +309 -0
- agno/db/surrealdb/queries.py +71 -0
- agno/db/surrealdb/surrealdb.py +1353 -0
- agno/db/surrealdb/utils.py +147 -0
- agno/db/utils.py +116 -0
- agno/debug.py +18 -0
- agno/eval/__init__.py +14 -0
- agno/eval/accuracy.py +834 -0
- agno/eval/performance.py +773 -0
- agno/eval/reliability.py +306 -0
- agno/eval/utils.py +119 -0
- agno/exceptions.py +161 -0
- agno/filters.py +354 -0
- agno/guardrails/__init__.py +6 -0
- agno/guardrails/base.py +19 -0
- agno/guardrails/openai.py +144 -0
- agno/guardrails/pii.py +94 -0
- agno/guardrails/prompt_injection.py +52 -0
- agno/integrations/__init__.py +0 -0
- agno/integrations/discord/__init__.py +3 -0
- agno/integrations/discord/client.py +203 -0
- agno/knowledge/__init__.py +5 -0
- agno/knowledge/chunking/__init__.py +0 -0
- agno/knowledge/chunking/agentic.py +79 -0
- agno/knowledge/chunking/document.py +91 -0
- agno/knowledge/chunking/fixed.py +57 -0
- agno/knowledge/chunking/markdown.py +151 -0
- agno/knowledge/chunking/recursive.py +63 -0
- agno/knowledge/chunking/row.py +39 -0
- agno/knowledge/chunking/semantic.py +86 -0
- agno/knowledge/chunking/strategy.py +165 -0
- agno/knowledge/content.py +74 -0
- agno/knowledge/document/__init__.py +5 -0
- agno/knowledge/document/base.py +58 -0
- agno/knowledge/embedder/__init__.py +5 -0
- agno/knowledge/embedder/aws_bedrock.py +343 -0
- agno/knowledge/embedder/azure_openai.py +210 -0
- agno/knowledge/embedder/base.py +23 -0
- agno/knowledge/embedder/cohere.py +323 -0
- agno/knowledge/embedder/fastembed.py +62 -0
- agno/knowledge/embedder/fireworks.py +13 -0
- agno/knowledge/embedder/google.py +258 -0
- agno/knowledge/embedder/huggingface.py +94 -0
- agno/knowledge/embedder/jina.py +182 -0
- agno/knowledge/embedder/langdb.py +22 -0
- agno/knowledge/embedder/mistral.py +206 -0
- agno/knowledge/embedder/nebius.py +13 -0
- agno/knowledge/embedder/ollama.py +154 -0
- agno/knowledge/embedder/openai.py +195 -0
- agno/knowledge/embedder/sentence_transformer.py +63 -0
- agno/knowledge/embedder/together.py +13 -0
- agno/knowledge/embedder/vllm.py +262 -0
- agno/knowledge/embedder/voyageai.py +165 -0
- agno/knowledge/knowledge.py +1988 -0
- agno/knowledge/reader/__init__.py +7 -0
- agno/knowledge/reader/arxiv_reader.py +81 -0
- agno/knowledge/reader/base.py +95 -0
- agno/knowledge/reader/csv_reader.py +166 -0
- agno/knowledge/reader/docx_reader.py +82 -0
- agno/knowledge/reader/field_labeled_csv_reader.py +292 -0
- agno/knowledge/reader/firecrawl_reader.py +201 -0
- agno/knowledge/reader/json_reader.py +87 -0
- agno/knowledge/reader/markdown_reader.py +137 -0
- agno/knowledge/reader/pdf_reader.py +431 -0
- agno/knowledge/reader/pptx_reader.py +101 -0
- agno/knowledge/reader/reader_factory.py +313 -0
- agno/knowledge/reader/s3_reader.py +89 -0
- agno/knowledge/reader/tavily_reader.py +194 -0
- agno/knowledge/reader/text_reader.py +115 -0
- agno/knowledge/reader/web_search_reader.py +372 -0
- agno/knowledge/reader/website_reader.py +455 -0
- agno/knowledge/reader/wikipedia_reader.py +59 -0
- agno/knowledge/reader/youtube_reader.py +78 -0
- agno/knowledge/remote_content/__init__.py +0 -0
- agno/knowledge/remote_content/remote_content.py +88 -0
- agno/knowledge/reranker/__init__.py +3 -0
- agno/knowledge/reranker/base.py +14 -0
- agno/knowledge/reranker/cohere.py +64 -0
- agno/knowledge/reranker/infinity.py +195 -0
- agno/knowledge/reranker/sentence_transformer.py +54 -0
- agno/knowledge/types.py +39 -0
- agno/knowledge/utils.py +189 -0
- agno/media.py +462 -0
- agno/memory/__init__.py +3 -0
- agno/memory/manager.py +1327 -0
- agno/models/__init__.py +0 -0
- agno/models/aimlapi/__init__.py +5 -0
- agno/models/aimlapi/aimlapi.py +45 -0
- agno/models/anthropic/__init__.py +5 -0
- agno/models/anthropic/claude.py +757 -0
- agno/models/aws/__init__.py +15 -0
- agno/models/aws/bedrock.py +701 -0
- agno/models/aws/claude.py +378 -0
- agno/models/azure/__init__.py +18 -0
- agno/models/azure/ai_foundry.py +485 -0
- agno/models/azure/openai_chat.py +131 -0
- agno/models/base.py +2175 -0
- agno/models/cerebras/__init__.py +12 -0
- agno/models/cerebras/cerebras.py +501 -0
- agno/models/cerebras/cerebras_openai.py +112 -0
- agno/models/cohere/__init__.py +5 -0
- agno/models/cohere/chat.py +389 -0
- agno/models/cometapi/__init__.py +5 -0
- agno/models/cometapi/cometapi.py +57 -0
- agno/models/dashscope/__init__.py +5 -0
- agno/models/dashscope/dashscope.py +91 -0
- agno/models/deepinfra/__init__.py +5 -0
- agno/models/deepinfra/deepinfra.py +28 -0
- agno/models/deepseek/__init__.py +5 -0
- agno/models/deepseek/deepseek.py +61 -0
- agno/models/defaults.py +1 -0
- agno/models/fireworks/__init__.py +5 -0
- agno/models/fireworks/fireworks.py +26 -0
- agno/models/google/__init__.py +5 -0
- agno/models/google/gemini.py +1085 -0
- agno/models/groq/__init__.py +5 -0
- agno/models/groq/groq.py +556 -0
- agno/models/huggingface/__init__.py +5 -0
- agno/models/huggingface/huggingface.py +491 -0
- agno/models/ibm/__init__.py +5 -0
- agno/models/ibm/watsonx.py +422 -0
- agno/models/internlm/__init__.py +3 -0
- agno/models/internlm/internlm.py +26 -0
- agno/models/langdb/__init__.py +1 -0
- agno/models/langdb/langdb.py +48 -0
- agno/models/litellm/__init__.py +14 -0
- agno/models/litellm/chat.py +468 -0
- agno/models/litellm/litellm_openai.py +25 -0
- agno/models/llama_cpp/__init__.py +5 -0
- agno/models/llama_cpp/llama_cpp.py +22 -0
- agno/models/lmstudio/__init__.py +5 -0
- agno/models/lmstudio/lmstudio.py +25 -0
- agno/models/message.py +434 -0
- agno/models/meta/__init__.py +12 -0
- agno/models/meta/llama.py +475 -0
- agno/models/meta/llama_openai.py +78 -0
- agno/models/metrics.py +120 -0
- agno/models/mistral/__init__.py +5 -0
- agno/models/mistral/mistral.py +432 -0
- agno/models/nebius/__init__.py +3 -0
- agno/models/nebius/nebius.py +54 -0
- agno/models/nexus/__init__.py +3 -0
- agno/models/nexus/nexus.py +22 -0
- agno/models/nvidia/__init__.py +5 -0
- agno/models/nvidia/nvidia.py +28 -0
- agno/models/ollama/__init__.py +5 -0
- agno/models/ollama/chat.py +441 -0
- agno/models/openai/__init__.py +9 -0
- agno/models/openai/chat.py +883 -0
- agno/models/openai/like.py +27 -0
- agno/models/openai/responses.py +1050 -0
- agno/models/openrouter/__init__.py +5 -0
- agno/models/openrouter/openrouter.py +66 -0
- agno/models/perplexity/__init__.py +5 -0
- agno/models/perplexity/perplexity.py +187 -0
- agno/models/portkey/__init__.py +3 -0
- agno/models/portkey/portkey.py +81 -0
- agno/models/requesty/__init__.py +5 -0
- agno/models/requesty/requesty.py +52 -0
- agno/models/response.py +199 -0
- agno/models/sambanova/__init__.py +5 -0
- agno/models/sambanova/sambanova.py +28 -0
- agno/models/siliconflow/__init__.py +5 -0
- agno/models/siliconflow/siliconflow.py +25 -0
- agno/models/together/__init__.py +5 -0
- agno/models/together/together.py +25 -0
- agno/models/utils.py +266 -0
- agno/models/vercel/__init__.py +3 -0
- agno/models/vercel/v0.py +26 -0
- agno/models/vertexai/__init__.py +0 -0
- agno/models/vertexai/claude.py +70 -0
- agno/models/vllm/__init__.py +3 -0
- agno/models/vllm/vllm.py +78 -0
- agno/models/xai/__init__.py +3 -0
- agno/models/xai/xai.py +113 -0
- agno/os/__init__.py +3 -0
- agno/os/app.py +876 -0
- agno/os/auth.py +57 -0
- agno/os/config.py +104 -0
- agno/os/interfaces/__init__.py +1 -0
- agno/os/interfaces/a2a/__init__.py +3 -0
- agno/os/interfaces/a2a/a2a.py +42 -0
- agno/os/interfaces/a2a/router.py +250 -0
- agno/os/interfaces/a2a/utils.py +924 -0
- agno/os/interfaces/agui/__init__.py +3 -0
- agno/os/interfaces/agui/agui.py +47 -0
- agno/os/interfaces/agui/router.py +144 -0
- agno/os/interfaces/agui/utils.py +534 -0
- agno/os/interfaces/base.py +25 -0
- agno/os/interfaces/slack/__init__.py +3 -0
- agno/os/interfaces/slack/router.py +148 -0
- agno/os/interfaces/slack/security.py +30 -0
- agno/os/interfaces/slack/slack.py +47 -0
- agno/os/interfaces/whatsapp/__init__.py +3 -0
- agno/os/interfaces/whatsapp/router.py +211 -0
- agno/os/interfaces/whatsapp/security.py +53 -0
- agno/os/interfaces/whatsapp/whatsapp.py +36 -0
- agno/os/mcp.py +292 -0
- agno/os/middleware/__init__.py +7 -0
- agno/os/middleware/jwt.py +233 -0
- agno/os/router.py +1763 -0
- agno/os/routers/__init__.py +3 -0
- agno/os/routers/evals/__init__.py +3 -0
- agno/os/routers/evals/evals.py +430 -0
- agno/os/routers/evals/schemas.py +142 -0
- agno/os/routers/evals/utils.py +162 -0
- agno/os/routers/health.py +31 -0
- agno/os/routers/home.py +52 -0
- agno/os/routers/knowledge/__init__.py +3 -0
- agno/os/routers/knowledge/knowledge.py +997 -0
- agno/os/routers/knowledge/schemas.py +178 -0
- agno/os/routers/memory/__init__.py +3 -0
- agno/os/routers/memory/memory.py +515 -0
- agno/os/routers/memory/schemas.py +62 -0
- agno/os/routers/metrics/__init__.py +3 -0
- agno/os/routers/metrics/metrics.py +190 -0
- agno/os/routers/metrics/schemas.py +47 -0
- agno/os/routers/session/__init__.py +3 -0
- agno/os/routers/session/session.py +997 -0
- agno/os/schema.py +1055 -0
- agno/os/settings.py +43 -0
- agno/os/utils.py +630 -0
- agno/py.typed +0 -0
- agno/reasoning/__init__.py +0 -0
- agno/reasoning/anthropic.py +80 -0
- agno/reasoning/azure_ai_foundry.py +67 -0
- agno/reasoning/deepseek.py +63 -0
- agno/reasoning/default.py +97 -0
- agno/reasoning/gemini.py +73 -0
- agno/reasoning/groq.py +71 -0
- agno/reasoning/helpers.py +63 -0
- agno/reasoning/ollama.py +67 -0
- agno/reasoning/openai.py +86 -0
- agno/reasoning/step.py +31 -0
- agno/reasoning/vertexai.py +76 -0
- agno/run/__init__.py +6 -0
- agno/run/agent.py +787 -0
- agno/run/base.py +229 -0
- agno/run/cancel.py +81 -0
- agno/run/messages.py +32 -0
- agno/run/team.py +753 -0
- agno/run/workflow.py +708 -0
- agno/session/__init__.py +10 -0
- agno/session/agent.py +295 -0
- agno/session/summary.py +265 -0
- agno/session/team.py +392 -0
- agno/session/workflow.py +205 -0
- agno/team/__init__.py +37 -0
- agno/team/team.py +8793 -0
- agno/tools/__init__.py +10 -0
- agno/tools/agentql.py +120 -0
- agno/tools/airflow.py +69 -0
- agno/tools/api.py +122 -0
- agno/tools/apify.py +314 -0
- agno/tools/arxiv.py +127 -0
- agno/tools/aws_lambda.py +53 -0
- agno/tools/aws_ses.py +66 -0
- agno/tools/baidusearch.py +89 -0
- agno/tools/bitbucket.py +292 -0
- agno/tools/brandfetch.py +213 -0
- agno/tools/bravesearch.py +106 -0
- agno/tools/brightdata.py +367 -0
- agno/tools/browserbase.py +209 -0
- agno/tools/calcom.py +255 -0
- agno/tools/calculator.py +151 -0
- agno/tools/cartesia.py +187 -0
- agno/tools/clickup.py +244 -0
- agno/tools/confluence.py +240 -0
- agno/tools/crawl4ai.py +158 -0
- agno/tools/csv_toolkit.py +185 -0
- agno/tools/dalle.py +110 -0
- agno/tools/daytona.py +475 -0
- agno/tools/decorator.py +262 -0
- agno/tools/desi_vocal.py +108 -0
- agno/tools/discord.py +161 -0
- agno/tools/docker.py +716 -0
- agno/tools/duckdb.py +379 -0
- agno/tools/duckduckgo.py +91 -0
- agno/tools/e2b.py +703 -0
- agno/tools/eleven_labs.py +196 -0
- agno/tools/email.py +67 -0
- agno/tools/evm.py +129 -0
- agno/tools/exa.py +396 -0
- agno/tools/fal.py +127 -0
- agno/tools/file.py +240 -0
- agno/tools/file_generation.py +350 -0
- agno/tools/financial_datasets.py +288 -0
- agno/tools/firecrawl.py +143 -0
- agno/tools/function.py +1187 -0
- agno/tools/giphy.py +93 -0
- agno/tools/github.py +1760 -0
- agno/tools/gmail.py +922 -0
- agno/tools/google_bigquery.py +117 -0
- agno/tools/google_drive.py +270 -0
- agno/tools/google_maps.py +253 -0
- agno/tools/googlecalendar.py +674 -0
- agno/tools/googlesearch.py +98 -0
- agno/tools/googlesheets.py +377 -0
- agno/tools/hackernews.py +77 -0
- agno/tools/jina.py +101 -0
- agno/tools/jira.py +170 -0
- agno/tools/knowledge.py +218 -0
- agno/tools/linear.py +426 -0
- agno/tools/linkup.py +58 -0
- agno/tools/local_file_system.py +90 -0
- agno/tools/lumalab.py +183 -0
- agno/tools/mcp/__init__.py +10 -0
- agno/tools/mcp/mcp.py +331 -0
- agno/tools/mcp/multi_mcp.py +347 -0
- agno/tools/mcp/params.py +24 -0
- agno/tools/mcp_toolbox.py +284 -0
- agno/tools/mem0.py +193 -0
- agno/tools/memori.py +339 -0
- agno/tools/memory.py +419 -0
- agno/tools/mlx_transcribe.py +139 -0
- agno/tools/models/__init__.py +0 -0
- agno/tools/models/azure_openai.py +190 -0
- agno/tools/models/gemini.py +203 -0
- agno/tools/models/groq.py +158 -0
- agno/tools/models/morph.py +186 -0
- agno/tools/models/nebius.py +124 -0
- agno/tools/models_labs.py +195 -0
- agno/tools/moviepy_video.py +349 -0
- agno/tools/neo4j.py +134 -0
- agno/tools/newspaper.py +46 -0
- agno/tools/newspaper4k.py +93 -0
- agno/tools/notion.py +204 -0
- agno/tools/openai.py +202 -0
- agno/tools/openbb.py +160 -0
- agno/tools/opencv.py +321 -0
- agno/tools/openweather.py +233 -0
- agno/tools/oxylabs.py +385 -0
- agno/tools/pandas.py +102 -0
- agno/tools/parallel.py +314 -0
- agno/tools/postgres.py +257 -0
- agno/tools/pubmed.py +188 -0
- agno/tools/python.py +205 -0
- agno/tools/reasoning.py +283 -0
- agno/tools/reddit.py +467 -0
- agno/tools/replicate.py +117 -0
- agno/tools/resend.py +62 -0
- agno/tools/scrapegraph.py +222 -0
- agno/tools/searxng.py +152 -0
- agno/tools/serpapi.py +116 -0
- agno/tools/serper.py +255 -0
- agno/tools/shell.py +53 -0
- agno/tools/slack.py +136 -0
- agno/tools/sleep.py +20 -0
- agno/tools/spider.py +116 -0
- agno/tools/sql.py +154 -0
- agno/tools/streamlit/__init__.py +0 -0
- agno/tools/streamlit/components.py +113 -0
- agno/tools/tavily.py +254 -0
- agno/tools/telegram.py +48 -0
- agno/tools/todoist.py +218 -0
- agno/tools/tool_registry.py +1 -0
- agno/tools/toolkit.py +146 -0
- agno/tools/trafilatura.py +388 -0
- agno/tools/trello.py +274 -0
- agno/tools/twilio.py +186 -0
- agno/tools/user_control_flow.py +78 -0
- agno/tools/valyu.py +228 -0
- agno/tools/visualization.py +467 -0
- agno/tools/webbrowser.py +28 -0
- agno/tools/webex.py +76 -0
- agno/tools/website.py +54 -0
- agno/tools/webtools.py +45 -0
- agno/tools/whatsapp.py +286 -0
- agno/tools/wikipedia.py +63 -0
- agno/tools/workflow.py +278 -0
- agno/tools/x.py +335 -0
- agno/tools/yfinance.py +257 -0
- agno/tools/youtube.py +184 -0
- agno/tools/zendesk.py +82 -0
- agno/tools/zep.py +454 -0
- agno/tools/zoom.py +382 -0
- agno/utils/__init__.py +0 -0
- agno/utils/agent.py +820 -0
- agno/utils/audio.py +49 -0
- agno/utils/certs.py +27 -0
- agno/utils/code_execution.py +11 -0
- agno/utils/common.py +132 -0
- agno/utils/dttm.py +13 -0
- agno/utils/enum.py +22 -0
- agno/utils/env.py +11 -0
- agno/utils/events.py +696 -0
- agno/utils/format_str.py +16 -0
- agno/utils/functions.py +166 -0
- agno/utils/gemini.py +426 -0
- agno/utils/hooks.py +57 -0
- agno/utils/http.py +74 -0
- agno/utils/json_schema.py +234 -0
- agno/utils/knowledge.py +36 -0
- agno/utils/location.py +19 -0
- agno/utils/log.py +255 -0
- agno/utils/mcp.py +214 -0
- agno/utils/media.py +352 -0
- agno/utils/merge_dict.py +41 -0
- agno/utils/message.py +118 -0
- agno/utils/models/__init__.py +0 -0
- agno/utils/models/ai_foundry.py +43 -0
- agno/utils/models/claude.py +358 -0
- agno/utils/models/cohere.py +87 -0
- agno/utils/models/llama.py +78 -0
- agno/utils/models/mistral.py +98 -0
- agno/utils/models/openai_responses.py +140 -0
- agno/utils/models/schema_utils.py +153 -0
- agno/utils/models/watsonx.py +41 -0
- agno/utils/openai.py +257 -0
- agno/utils/pickle.py +32 -0
- agno/utils/pprint.py +178 -0
- agno/utils/print_response/__init__.py +0 -0
- agno/utils/print_response/agent.py +842 -0
- agno/utils/print_response/team.py +1724 -0
- agno/utils/print_response/workflow.py +1668 -0
- agno/utils/prompts.py +111 -0
- agno/utils/reasoning.py +108 -0
- agno/utils/response.py +163 -0
- agno/utils/response_iterator.py +17 -0
- agno/utils/safe_formatter.py +24 -0
- agno/utils/serialize.py +32 -0
- agno/utils/shell.py +22 -0
- agno/utils/streamlit.py +487 -0
- agno/utils/string.py +231 -0
- agno/utils/team.py +139 -0
- agno/utils/timer.py +41 -0
- agno/utils/tools.py +102 -0
- agno/utils/web.py +23 -0
- agno/utils/whatsapp.py +305 -0
- agno/utils/yaml_io.py +25 -0
- agno/vectordb/__init__.py +3 -0
- agno/vectordb/base.py +127 -0
- agno/vectordb/cassandra/__init__.py +5 -0
- agno/vectordb/cassandra/cassandra.py +501 -0
- agno/vectordb/cassandra/extra_param_mixin.py +11 -0
- agno/vectordb/cassandra/index.py +13 -0
- agno/vectordb/chroma/__init__.py +5 -0
- agno/vectordb/chroma/chromadb.py +929 -0
- agno/vectordb/clickhouse/__init__.py +9 -0
- agno/vectordb/clickhouse/clickhousedb.py +835 -0
- agno/vectordb/clickhouse/index.py +9 -0
- agno/vectordb/couchbase/__init__.py +3 -0
- agno/vectordb/couchbase/couchbase.py +1442 -0
- agno/vectordb/distance.py +7 -0
- agno/vectordb/lancedb/__init__.py +6 -0
- agno/vectordb/lancedb/lance_db.py +995 -0
- agno/vectordb/langchaindb/__init__.py +5 -0
- agno/vectordb/langchaindb/langchaindb.py +163 -0
- agno/vectordb/lightrag/__init__.py +5 -0
- agno/vectordb/lightrag/lightrag.py +388 -0
- agno/vectordb/llamaindex/__init__.py +3 -0
- agno/vectordb/llamaindex/llamaindexdb.py +166 -0
- agno/vectordb/milvus/__init__.py +4 -0
- agno/vectordb/milvus/milvus.py +1182 -0
- agno/vectordb/mongodb/__init__.py +9 -0
- agno/vectordb/mongodb/mongodb.py +1417 -0
- agno/vectordb/pgvector/__init__.py +12 -0
- agno/vectordb/pgvector/index.py +23 -0
- agno/vectordb/pgvector/pgvector.py +1462 -0
- agno/vectordb/pineconedb/__init__.py +5 -0
- agno/vectordb/pineconedb/pineconedb.py +747 -0
- agno/vectordb/qdrant/__init__.py +5 -0
- agno/vectordb/qdrant/qdrant.py +1134 -0
- agno/vectordb/redis/__init__.py +9 -0
- agno/vectordb/redis/redisdb.py +694 -0
- agno/vectordb/search.py +7 -0
- agno/vectordb/singlestore/__init__.py +10 -0
- agno/vectordb/singlestore/index.py +41 -0
- agno/vectordb/singlestore/singlestore.py +763 -0
- agno/vectordb/surrealdb/__init__.py +3 -0
- agno/vectordb/surrealdb/surrealdb.py +699 -0
- agno/vectordb/upstashdb/__init__.py +5 -0
- agno/vectordb/upstashdb/upstashdb.py +718 -0
- agno/vectordb/weaviate/__init__.py +8 -0
- agno/vectordb/weaviate/index.py +15 -0
- agno/vectordb/weaviate/weaviate.py +1005 -0
- agno/workflow/__init__.py +23 -0
- agno/workflow/agent.py +299 -0
- agno/workflow/condition.py +738 -0
- agno/workflow/loop.py +735 -0
- agno/workflow/parallel.py +824 -0
- agno/workflow/router.py +702 -0
- agno/workflow/step.py +1432 -0
- agno/workflow/steps.py +592 -0
- agno/workflow/types.py +520 -0
- agno/workflow/workflow.py +4321 -0
- agno-2.2.13.dist-info/METADATA +614 -0
- agno-2.2.13.dist-info/RECORD +575 -0
- agno-2.2.13.dist-info/WHEEL +5 -0
- agno-2.2.13.dist-info/licenses/LICENSE +201 -0
- agno-2.2.13.dist-info/top_level.txt +1 -0
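
The remainder of this section reproduces the added source of agno/db/redis/redis.py. As a minimal, hypothetical usage sketch (not part of the package diff; it assumes the RedisDb class shown below is re-exported from agno.db.redis, and the user id is purely illustrative), the adapter could be wired up roughly like this:

```python
# Minimal, hypothetical usage sketch -- not part of the package contents.
# Assumes RedisDb is re-exported from agno.db.redis; otherwise import it
# from agno.db.redis.redis directly.
from agno.db.base import SessionType
from agno.db.redis import RedisDb

# Connect via URL; alternatively pass a pre-built redis.Redis client as
# redis_client=..., which takes precedence over db_url.
db = RedisDb(db_url="redis://localhost:6379/0", db_prefix="agno")

# List raw agent-session records for one (illustrative) user id.
records, total = db.get_sessions(
    session_type=SessionType.AGENT,
    user_id="user-123",   # hypothetical id, for illustration only
    limit=10,
    deserialize=False,    # returns (list of dicts, total matching count)
)
for record in records:
    print(record["session_id"], record.get("created_at"))

# Individual sessions can be deleted by id.
if records:
    db.delete_session(records[0]["session_id"])
```
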
agno/db/redis/redis.py
ADDED
|
@@ -0,0 +1,1660 @@
|
|
|
1
|
+
import time
|
|
2
|
+
from datetime import date, datetime, timedelta, timezone
|
|
3
|
+
from typing import Any, Dict, List, Optional, Tuple, Union
|
|
4
|
+
from uuid import uuid4
|
|
5
|
+
|
|
6
|
+
from agno.db.base import BaseDb, SessionType
|
|
7
|
+
from agno.db.redis.utils import (
|
|
8
|
+
apply_filters,
|
|
9
|
+
apply_pagination,
|
|
10
|
+
apply_sorting,
|
|
11
|
+
calculate_date_metrics,
|
|
12
|
+
create_index_entries,
|
|
13
|
+
deserialize_cultural_knowledge_from_db,
|
|
14
|
+
deserialize_data,
|
|
15
|
+
fetch_all_sessions_data,
|
|
16
|
+
generate_redis_key,
|
|
17
|
+
get_all_keys_for_table,
|
|
18
|
+
get_dates_to_calculate_metrics_for,
|
|
19
|
+
remove_index_entries,
|
|
20
|
+
serialize_cultural_knowledge_for_db,
|
|
21
|
+
serialize_data,
|
|
22
|
+
)
|
|
23
|
+
from agno.db.schemas.culture import CulturalKnowledge
|
|
24
|
+
from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
|
|
25
|
+
from agno.db.schemas.knowledge import KnowledgeRow
|
|
26
|
+
from agno.db.schemas.memory import UserMemory
|
|
27
|
+
from agno.session import AgentSession, Session, TeamSession, WorkflowSession
|
|
28
|
+
from agno.utils.log import log_debug, log_error, log_info
|
|
29
|
+
from agno.utils.string import generate_id
|
|
30
|
+
|
|
31
|
+
try:
|
|
32
|
+
from redis import Redis
|
|
33
|
+
except ImportError:
|
|
34
|
+
raise ImportError("`redis` not installed. Please install it using `pip install redis`")
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
class RedisDb(BaseDb):
|
|
38
|
+
def __init__(
|
|
39
|
+
self,
|
|
40
|
+
id: Optional[str] = None,
|
|
41
|
+
redis_client: Optional[Redis] = None,
|
|
42
|
+
db_url: Optional[str] = None,
|
|
43
|
+
db_prefix: str = "agno",
|
|
44
|
+
expire: Optional[int] = None,
|
|
45
|
+
session_table: Optional[str] = None,
|
|
46
|
+
memory_table: Optional[str] = None,
|
|
47
|
+
metrics_table: Optional[str] = None,
|
|
48
|
+
eval_table: Optional[str] = None,
|
|
49
|
+
knowledge_table: Optional[str] = None,
|
|
50
|
+
culture_table: Optional[str] = None,
|
|
51
|
+
):
|
|
52
|
+
"""
|
|
53
|
+
Interface for interacting with a Redis database.
|
|
54
|
+
|
|
55
|
+
The following order is used to determine the database connection:
|
|
56
|
+
1. Use the redis_client if provided
|
|
57
|
+
2. Use the db_url
|
|
58
|
+
3. Raise an error if neither is provided
|
|
59
|
+
|
|
60
|
+
Args:
|
|
61
|
+
id (Optional[str]): The ID of the database.
|
|
62
|
+
redis_client (Optional[Redis]): Redis client instance to use. If not provided a new client will be created.
|
|
63
|
+
db_url (Optional[str]): Redis connection URL (e.g., "redis://localhost:6379/0" or "rediss://user:pass@host:port/db")
|
|
64
|
+
db_prefix (str): Prefix for all Redis keys
|
|
65
|
+
expire (Optional[int]): TTL for Redis keys in seconds
|
|
66
|
+
session_table (Optional[str]): Name of the table to store sessions
|
|
67
|
+
memory_table (Optional[str]): Name of the table to store memories
|
|
68
|
+
metrics_table (Optional[str]): Name of the table to store metrics
|
|
69
|
+
eval_table (Optional[str]): Name of the table to store evaluation runs
|
|
70
|
+
knowledge_table (Optional[str]): Name of the table to store knowledge documents
|
|
71
|
+
culture_table (Optional[str]): Name of the table to store cultural knowledge
|
|
72
|
+
|
|
73
|
+
Raises:
|
|
74
|
+
ValueError: If neither redis_client nor db_url is provided.
|
|
75
|
+
"""
|
|
76
|
+
if id is None:
|
|
77
|
+
base_seed = db_url or str(redis_client)
|
|
78
|
+
seed = f"{base_seed}#{db_prefix}"
|
|
79
|
+
id = generate_id(seed)
|
|
80
|
+
|
|
81
|
+
super().__init__(
|
|
82
|
+
id=id,
|
|
83
|
+
session_table=session_table,
|
|
84
|
+
memory_table=memory_table,
|
|
85
|
+
metrics_table=metrics_table,
|
|
86
|
+
eval_table=eval_table,
|
|
87
|
+
knowledge_table=knowledge_table,
|
|
88
|
+
culture_table=culture_table,
|
|
89
|
+
)
|
|
90
|
+
|
|
91
|
+
self.db_prefix = db_prefix
|
|
92
|
+
self.expire = expire
|
|
93
|
+
|
|
94
|
+
if redis_client is not None:
|
|
95
|
+
self.redis_client = redis_client
|
|
96
|
+
elif db_url is not None:
|
|
97
|
+
self.redis_client = Redis.from_url(db_url, decode_responses=True)
|
|
98
|
+
else:
|
|
99
|
+
raise ValueError("One of redis_client or db_url must be provided")
|
|
100
|
+
|
|
101
|
+
# -- DB methods --
|
|
102
|
+
|
|
103
|
+
def table_exists(self, table_name: str) -> bool:
|
|
104
|
+
"""Redis implementation, always returns True."""
|
|
105
|
+
return True
|
|
106
|
+
|
|
107
|
+
def _get_table_name(self, table_type: str) -> str:
|
|
108
|
+
"""Get the active table name for the given table type."""
|
|
109
|
+
if table_type == "sessions":
|
|
110
|
+
return self.session_table_name
|
|
111
|
+
|
|
112
|
+
elif table_type == "memories":
|
|
113
|
+
return self.memory_table_name
|
|
114
|
+
|
|
115
|
+
elif table_type == "metrics":
|
|
116
|
+
return self.metrics_table_name
|
|
117
|
+
|
|
118
|
+
elif table_type == "evals":
|
|
119
|
+
return self.eval_table_name
|
|
120
|
+
|
|
121
|
+
elif table_type == "knowledge":
|
|
122
|
+
return self.knowledge_table_name
|
|
123
|
+
|
|
124
|
+
elif table_type == "culture":
|
|
125
|
+
return self.culture_table_name
|
|
126
|
+
|
|
127
|
+
else:
|
|
128
|
+
raise ValueError(f"Unknown table type: {table_type}")
|
|
129
|
+
|
|
130
|
+
def _store_record(
|
|
131
|
+
self, table_type: str, record_id: str, data: Dict[str, Any], index_fields: Optional[List[str]] = None
|
|
132
|
+
) -> bool:
|
|
133
|
+
"""Generic method to store a record in Redis, considering optional indexing.
|
|
134
|
+
|
|
135
|
+
Args:
|
|
136
|
+
table_type (str): The type of table to store the record in.
|
|
137
|
+
record_id (str): The ID of the record to store.
|
|
138
|
+
data (Dict[str, Any]): The data to store in the record.
|
|
139
|
+
index_fields (Optional[List[str]]): The fields to index the record by.
|
|
140
|
+
|
|
141
|
+
Returns:
|
|
142
|
+
bool: True if the record was stored successfully, False otherwise.
|
|
143
|
+
"""
|
|
144
|
+
try:
|
|
145
|
+
key = generate_redis_key(prefix=self.db_prefix, table_type=table_type, key_id=record_id)
|
|
146
|
+
serialized_data = serialize_data(data)
|
|
147
|
+
|
|
148
|
+
self.redis_client.set(key, serialized_data, ex=self.expire)
|
|
149
|
+
|
|
150
|
+
if index_fields:
|
|
151
|
+
create_index_entries(
|
|
152
|
+
redis_client=self.redis_client,
|
|
153
|
+
prefix=self.db_prefix,
|
|
154
|
+
table_type=table_type,
|
|
155
|
+
record_id=record_id,
|
|
156
|
+
record_data=data,
|
|
157
|
+
index_fields=index_fields,
|
|
158
|
+
)
|
|
159
|
+
|
|
160
|
+
return True
|
|
161
|
+
|
|
162
|
+
except Exception as e:
|
|
163
|
+
log_error(f"Error storing Redis record: {e}")
|
|
164
|
+
return False
|
|
165
|
+
|
|
166
|
+
def _get_record(self, table_type: str, record_id: str) -> Optional[Dict[str, Any]]:
|
|
167
|
+
"""Generic method to get a record from Redis.
|
|
168
|
+
|
|
169
|
+
Args:
|
|
170
|
+
table_type (str): The type of table to get the record from.
|
|
171
|
+
record_id (str): The ID of the record to get.
|
|
172
|
+
|
|
173
|
+
Returns:
|
|
174
|
+
Optional[Dict[str, Any]]: The record data if found, None otherwise.
|
|
175
|
+
"""
|
|
176
|
+
try:
|
|
177
|
+
key = generate_redis_key(prefix=self.db_prefix, table_type=table_type, key_id=record_id)
|
|
178
|
+
|
|
179
|
+
data = self.redis_client.get(key)
|
|
180
|
+
if data is None:
|
|
181
|
+
return None
|
|
182
|
+
|
|
183
|
+
return deserialize_data(data) # type: ignore
|
|
184
|
+
|
|
185
|
+
except Exception as e:
|
|
186
|
+
log_error(f"Error getting record {record_id}: {e}")
|
|
187
|
+
return None
|
|
188
|
+
|
|
189
|
+
def _delete_record(self, table_type: str, record_id: str, index_fields: Optional[List[str]] = None) -> bool:
|
|
190
|
+
"""Generic method to delete a record from Redis.
|
|
191
|
+
|
|
192
|
+
Args:
|
|
193
|
+
table_type (str): The type of table to delete the record from.
|
|
194
|
+
record_id (str): The ID of the record to delete.
|
|
195
|
+
index_fields (Optional[List[str]]): The fields to index the record by.
|
|
196
|
+
|
|
197
|
+
Returns:
|
|
198
|
+
bool: True if the record was deleted successfully, False otherwise.
|
|
199
|
+
|
|
200
|
+
Raises:
|
|
201
|
+
Exception: If any error occurs while deleting the record.
|
|
202
|
+
"""
|
|
203
|
+
try:
|
|
204
|
+
# Handle index deletion first
|
|
205
|
+
if index_fields:
|
|
206
|
+
record_data = self._get_record(table_type, record_id)
|
|
207
|
+
if record_data:
|
|
208
|
+
remove_index_entries(
|
|
209
|
+
redis_client=self.redis_client,
|
|
210
|
+
prefix=self.db_prefix,
|
|
211
|
+
table_type=table_type,
|
|
212
|
+
record_id=record_id,
|
|
213
|
+
record_data=record_data,
|
|
214
|
+
index_fields=index_fields,
|
|
215
|
+
)
|
|
216
|
+
|
|
217
|
+
key = generate_redis_key(prefix=self.db_prefix, table_type=table_type, key_id=record_id)
|
|
218
|
+
result = self.redis_client.delete(key)
|
|
219
|
+
if result is None or result == 0:
|
|
220
|
+
return False
|
|
221
|
+
|
|
222
|
+
return True
|
|
223
|
+
|
|
224
|
+
except Exception as e:
|
|
225
|
+
log_error(f"Error deleting record {record_id}: {e}")
|
|
226
|
+
return False
|
|
227
|
+
|
|
228
|
+
def _get_all_records(self, table_type: str) -> List[Dict[str, Any]]:
|
|
229
|
+
"""Generic method to get all records for a table type.
|
|
230
|
+
|
|
231
|
+
Args:
|
|
232
|
+
table_type (str): The type of table to get the records from.
|
|
233
|
+
|
|
234
|
+
Returns:
|
|
235
|
+
List[Dict[str, Any]]: The records data if found, None otherwise.
|
|
236
|
+
|
|
237
|
+
Raises:
|
|
238
|
+
Exception: If any error occurs while getting the records.
|
|
239
|
+
"""
|
|
240
|
+
try:
|
|
241
|
+
keys = get_all_keys_for_table(redis_client=self.redis_client, prefix=self.db_prefix, table_type=table_type)
|
|
242
|
+
|
|
243
|
+
records = []
|
|
244
|
+
for key in keys:
|
|
245
|
+
data = self.redis_client.get(key)
|
|
246
|
+
if data:
|
|
247
|
+
records.append(deserialize_data(data)) # type: ignore
|
|
248
|
+
|
|
249
|
+
return records
|
|
250
|
+
|
|
251
|
+
except Exception as e:
|
|
252
|
+
log_error(f"Error getting all records for {table_type}: {e}")
|
|
253
|
+
return []
|
|
254
|
+
|
|
255
|
+
# -- Session methods --
|
|
256
|
+
|
|
257
|
+
def delete_session(self, session_id: str) -> bool:
|
|
258
|
+
"""Delete a session from Redis.
|
|
259
|
+
|
|
260
|
+
Args:
|
|
261
|
+
session_id (str): The ID of the session to delete.
|
|
262
|
+
|
|
263
|
+
Raises:
|
|
264
|
+
Exception: If any error occurs while deleting the session.
|
|
265
|
+
"""
|
|
266
|
+
try:
|
|
267
|
+
if self._delete_record(
|
|
268
|
+
table_type="sessions",
|
|
269
|
+
record_id=session_id,
|
|
270
|
+
index_fields=["user_id", "agent_id", "team_id", "workflow_id", "session_type"],
|
|
271
|
+
):
|
|
272
|
+
log_debug(f"Successfully deleted session: {session_id}")
|
|
273
|
+
return True
|
|
274
|
+
else:
|
|
275
|
+
log_debug(f"No session found to delete with session_id: {session_id}")
|
|
276
|
+
return False
|
|
277
|
+
|
|
278
|
+
except Exception as e:
|
|
279
|
+
log_error(f"Error deleting session: {e}")
|
|
280
|
+
raise e
|
|
281
|
+
|
|
282
|
+
def delete_sessions(self, session_ids: List[str]) -> None:
|
|
283
|
+
"""Delete multiple sessions from Redis.
|
|
284
|
+
|
|
285
|
+
Args:
|
|
286
|
+
session_ids (List[str]): The IDs of the sessions to delete.
|
|
287
|
+
|
|
288
|
+
Raises:
|
|
289
|
+
Exception: If any error occurs while deleting the sessions.
|
|
290
|
+
"""
|
|
291
|
+
try:
|
|
292
|
+
deleted_count = 0
|
|
293
|
+
for session_id in session_ids:
|
|
294
|
+
if self._delete_record(
|
|
295
|
+
"sessions",
|
|
296
|
+
session_id,
|
|
297
|
+
index_fields=["user_id", "agent_id", "team_id", "workflow_id", "session_type"],
|
|
298
|
+
):
|
|
299
|
+
deleted_count += 1
|
|
300
|
+
log_debug(f"Successfully deleted {deleted_count} sessions")
|
|
301
|
+
|
|
302
|
+
except Exception as e:
|
|
303
|
+
log_error(f"Error deleting sessions: {e}")
|
|
304
|
+
raise e
|
|
305
|
+
|
|
306
|
+
def get_session(
|
|
307
|
+
self,
|
|
308
|
+
session_id: str,
|
|
309
|
+
session_type: SessionType,
|
|
310
|
+
user_id: Optional[str] = None,
|
|
311
|
+
deserialize: Optional[bool] = True,
|
|
312
|
+
) -> Optional[Union[Session, Dict[str, Any]]]:
|
|
313
|
+
"""Read a session from Redis.
|
|
314
|
+
|
|
315
|
+
Args:
|
|
316
|
+
session_id (str): The ID of the session to get.
|
|
317
|
+
session_type (SessionType): The type of session to get.
|
|
318
|
+
user_id (Optional[str]): The ID of the user to filter by.
|
|
319
|
+
|
|
320
|
+
Returns:
|
|
321
|
+
Optional[Union[AgentSession, TeamSession, WorkflowSession]]: The session if found, None otherwise.
|
|
322
|
+
|
|
323
|
+
Raises:
|
|
324
|
+
Exception: If any error occurs while getting the session.
|
|
325
|
+
"""
|
|
326
|
+
try:
|
|
327
|
+
session = self._get_record("sessions", session_id)
|
|
328
|
+
if session is None:
|
|
329
|
+
return None
|
|
330
|
+
|
|
331
|
+
# Apply filters
|
|
332
|
+
if user_id is not None and session.get("user_id") != user_id:
|
|
333
|
+
return None
|
|
334
|
+
|
|
335
|
+
if not deserialize:
|
|
336
|
+
return session
|
|
337
|
+
|
|
338
|
+
if session_type == SessionType.AGENT.value:
|
|
339
|
+
return AgentSession.from_dict(session)
|
|
340
|
+
elif session_type == SessionType.TEAM.value:
|
|
341
|
+
return TeamSession.from_dict(session)
|
|
342
|
+
elif session_type == SessionType.WORKFLOW.value:
|
|
343
|
+
return WorkflowSession.from_dict(session)
|
|
344
|
+
else:
|
|
345
|
+
raise ValueError(f"Invalid session type: {session_type}")
|
|
346
|
+
|
|
347
|
+
except Exception as e:
|
|
348
|
+
log_error(f"Exception reading session: {e}")
|
|
349
|
+
raise e
|
|
350
|
+
|
|
351
|
+
# TODO: optimizable
|
|
352
|
+
def get_sessions(
|
|
353
|
+
self,
|
|
354
|
+
session_type: Optional[SessionType] = None,
|
|
355
|
+
user_id: Optional[str] = None,
|
|
356
|
+
component_id: Optional[str] = None,
|
|
357
|
+
session_name: Optional[str] = None,
|
|
358
|
+
start_timestamp: Optional[int] = None,
|
|
359
|
+
end_timestamp: Optional[int] = None,
|
|
360
|
+
limit: Optional[int] = None,
|
|
361
|
+
page: Optional[int] = None,
|
|
362
|
+
sort_by: Optional[str] = None,
|
|
363
|
+
sort_order: Optional[str] = None,
|
|
364
|
+
deserialize: Optional[bool] = True,
|
|
365
|
+
create_index_if_not_found: Optional[bool] = True,
|
|
366
|
+
) -> Union[List[Session], Tuple[List[Dict[str, Any]], int]]:
|
|
367
|
+
"""Get all sessions matching the given filters.
|
|
368
|
+
|
|
369
|
+
Args:
|
|
370
|
+
session_type (Optional[SessionType]): The type of session to filter by.
|
|
371
|
+
user_id (Optional[str]): The ID of the user to filter by.
|
|
372
|
+
component_id (Optional[str]): The ID of the component to filter by.
|
|
373
|
+
session_name (Optional[str]): The name of the session to filter by.
|
|
374
|
+
limit (Optional[int]): The maximum number of sessions to return.
|
|
375
|
+
page (Optional[int]): The page number to return.
|
|
376
|
+
sort_by (Optional[str]): The field to sort by.
|
|
377
|
+
sort_order (Optional[str]): The order to sort by.
|
|
378
|
+
|
|
379
|
+
Returns:
|
|
380
|
+
List[Union[AgentSession, TeamSession, WorkflowSession]]: The list of sessions.
|
|
381
|
+
"""
|
|
382
|
+
try:
|
|
383
|
+
all_sessions = self._get_all_records("sessions")
|
|
384
|
+
|
|
385
|
+
conditions: Dict[str, Any] = {}
|
|
386
|
+
if session_type is not None:
|
|
387
|
+
conditions["session_type"] = session_type
|
|
388
|
+
if user_id is not None:
|
|
389
|
+
conditions["user_id"] = user_id
|
|
390
|
+
|
|
391
|
+
filtered_sessions = apply_filters(records=all_sessions, conditions=conditions)
|
|
392
|
+
|
|
393
|
+
if component_id is not None:
|
|
394
|
+
if session_type == SessionType.AGENT:
|
|
395
|
+
filtered_sessions = [s for s in filtered_sessions if s.get("agent_id") == component_id]
|
|
396
|
+
elif session_type == SessionType.TEAM:
|
|
397
|
+
filtered_sessions = [s for s in filtered_sessions if s.get("team_id") == component_id]
|
|
398
|
+
elif session_type == SessionType.WORKFLOW:
|
|
399
|
+
filtered_sessions = [s for s in filtered_sessions if s.get("workflow_id") == component_id]
|
|
400
|
+
if start_timestamp is not None:
|
|
401
|
+
filtered_sessions = [s for s in filtered_sessions if s.get("created_at", 0) >= start_timestamp]
|
|
402
|
+
if end_timestamp is not None:
|
|
403
|
+
filtered_sessions = [s for s in filtered_sessions if s.get("created_at", 0) <= end_timestamp]
|
|
404
|
+
|
|
405
|
+
if session_name is not None:
|
|
406
|
+
filtered_sessions = [
|
|
407
|
+
s
|
|
408
|
+
for s in filtered_sessions
|
|
409
|
+
if session_name.lower() in s.get("session_data", {}).get("session_name", "").lower()
|
|
410
|
+
]
|
|
411
|
+
|
|
412
|
+
sorted_sessions = apply_sorting(records=filtered_sessions, sort_by=sort_by, sort_order=sort_order)
|
|
413
|
+
sessions = apply_pagination(records=sorted_sessions, limit=limit, page=page)
|
|
414
|
+
sessions = [record for record in sessions]
|
|
415
|
+
|
|
416
|
+
if not deserialize:
|
|
417
|
+
return sessions, len(filtered_sessions)
|
|
418
|
+
|
|
419
|
+
if session_type == SessionType.AGENT:
|
|
420
|
+
return [AgentSession.from_dict(record) for record in sessions] # type: ignore
|
|
421
|
+
elif session_type == SessionType.TEAM:
|
|
422
|
+
return [TeamSession.from_dict(record) for record in sessions] # type: ignore
|
|
423
|
+
elif session_type == SessionType.WORKFLOW:
|
|
424
|
+
return [WorkflowSession.from_dict(record) for record in sessions] # type: ignore
|
|
425
|
+
else:
|
|
426
|
+
raise ValueError(f"Invalid session type: {session_type}")
|
|
427
|
+
|
|
428
|
+
except Exception as e:
|
|
429
|
+
log_error(f"Exception reading sessions: {e}")
|
|
430
|
+
raise e
|
|
431
|
+
|
|
432
|
+
def rename_session(
|
|
433
|
+
self, session_id: str, session_type: SessionType, session_name: str, deserialize: Optional[bool] = True
|
|
434
|
+
) -> Optional[Union[Session, Dict[str, Any]]]:
|
|
435
|
+
"""Rename a session in Redis.
|
|
436
|
+
|
|
437
|
+
Args:
|
|
438
|
+
session_id (str): The ID of the session to rename.
|
|
439
|
+
session_type (SessionType): The type of session to rename.
|
|
440
|
+
session_name (str): The new name of the session.
|
|
441
|
+
|
|
442
|
+
Returns:
|
|
443
|
+
Optional[Session]: The renamed session if successful, None otherwise.
|
|
444
|
+
|
|
445
|
+
Raises:
|
|
446
|
+
Exception: If any error occurs while renaming the session.
|
|
447
|
+
"""
|
|
448
|
+
try:
|
|
449
|
+
session = self._get_record("sessions", session_id)
|
|
450
|
+
if session is None:
|
|
451
|
+
return None
|
|
452
|
+
|
|
453
|
+
# Update session_name, in session_data
|
|
454
|
+
if "session_data" not in session:
|
|
455
|
+
session["session_data"] = {}
|
|
456
|
+
session["session_data"]["session_name"] = session_name
|
|
457
|
+
session["updated_at"] = int(time.time())
|
|
458
|
+
|
|
459
|
+
# Store updated session
|
|
460
|
+
success = self._store_record("sessions", session_id, session)
|
|
461
|
+
if not success:
|
|
462
|
+
return None
|
|
463
|
+
|
|
464
|
+
log_debug(f"Renamed session with id '{session_id}' to '{session_name}'")
|
|
465
|
+
|
|
466
|
+
if not deserialize:
|
|
467
|
+
return session
|
|
468
|
+
|
|
469
|
+
if session_type == SessionType.AGENT:
|
|
470
|
+
return AgentSession.from_dict(session)
|
|
471
|
+
elif session_type == SessionType.TEAM:
|
|
472
|
+
return TeamSession.from_dict(session)
|
|
473
|
+
elif session_type == SessionType.WORKFLOW:
|
|
474
|
+
return WorkflowSession.from_dict(session)
|
|
475
|
+
else:
|
|
476
|
+
raise ValueError(f"Invalid session type: {session_type}")
|
|
477
|
+
|
|
478
|
+
except Exception as e:
|
|
479
|
+
log_error(f"Error renaming session: {e}")
|
|
480
|
+
raise e
|
|
481
|
+
|
|
482
|
+
def upsert_session(
|
|
483
|
+
self, session: Session, deserialize: Optional[bool] = True
|
|
484
|
+
) -> Optional[Union[Session, Dict[str, Any]]]:
|
|
485
|
+
"""Insert or update a session in Redis.
|
|
486
|
+
|
|
487
|
+
Args:
|
|
488
|
+
session (Session): The session to upsert.
|
|
489
|
+
|
|
490
|
+
Returns:
|
|
491
|
+
Optional[Session]: The upserted session if successful, None otherwise.
|
|
492
|
+
|
|
493
|
+
Raises:
|
|
494
|
+
Exception: If any error occurs while upserting the session.
|
|
495
|
+
"""
|
|
496
|
+
try:
|
|
497
|
+
session_dict = session.to_dict()
|
|
498
|
+
|
|
499
|
+
if isinstance(session, AgentSession):
|
|
500
|
+
data = {
|
|
501
|
+
"session_id": session_dict.get("session_id"),
|
|
502
|
+
"session_type": SessionType.AGENT.value,
|
|
503
|
+
"agent_id": session_dict.get("agent_id"),
|
|
504
|
+
"team_id": session_dict.get("team_id"),
|
|
505
|
+
"workflow_id": session_dict.get("workflow_id"),
|
|
506
|
+
"user_id": session_dict.get("user_id"),
|
|
507
|
+
"runs": session_dict.get("runs"),
|
|
508
|
+
"agent_data": session_dict.get("agent_data"),
|
|
509
|
+
"team_data": session_dict.get("team_data"),
|
|
510
|
+
"workflow_data": session_dict.get("workflow_data"),
|
|
511
|
+
"session_data": session_dict.get("session_data"),
|
|
512
|
+
"summary": session_dict.get("summary"),
|
|
513
|
+
"metadata": session_dict.get("metadata"),
|
|
514
|
+
"created_at": session_dict.get("created_at") or int(time.time()),
|
|
515
|
+
"updated_at": int(time.time()),
|
|
516
|
+
}
|
|
517
|
+
|
|
518
|
+
success = self._store_record(
|
|
519
|
+
table_type="sessions",
|
|
520
|
+
record_id=session.session_id,
|
|
521
|
+
data=data,
|
|
522
|
+
index_fields=["user_id", "agent_id", "session_type"],
|
|
523
|
+
)
|
|
524
|
+
if not success:
|
|
525
|
+
return None
|
|
526
|
+
|
|
527
|
+
if not deserialize:
|
|
528
|
+
return data
|
|
529
|
+
|
|
530
|
+
return AgentSession.from_dict(data)
|
|
531
|
+
|
|
532
|
+
elif isinstance(session, TeamSession):
|
|
533
|
+
data = {
|
|
534
|
+
"session_id": session_dict.get("session_id"),
|
|
535
|
+
"session_type": SessionType.TEAM.value,
|
|
536
|
+
"agent_id": None,
|
|
537
|
+
"team_id": session_dict.get("team_id"),
|
|
538
|
+
"workflow_id": None,
|
|
539
|
+
"user_id": session_dict.get("user_id"),
|
|
540
|
+
"runs": session_dict.get("runs"),
|
|
541
|
+
"team_data": session_dict.get("team_data"),
|
|
542
|
+
"agent_data": None,
|
|
543
|
+
"workflow_data": None,
|
|
544
|
+
"session_data": session_dict.get("session_data"),
|
|
545
|
+
"summary": session_dict.get("summary"),
|
|
546
|
+
"metadata": session_dict.get("metadata"),
|
|
547
|
+
"created_at": session_dict.get("created_at") or int(time.time()),
|
|
548
|
+
"updated_at": int(time.time()),
|
|
549
|
+
}
|
|
550
|
+
|
|
551
|
+
success = self._store_record(
|
|
552
|
+
table_type="sessions",
|
|
553
|
+
record_id=session.session_id,
|
|
554
|
+
data=data,
|
|
555
|
+
index_fields=["user_id", "team_id", "session_type"],
|
|
556
|
+
)
|
|
557
|
+
if not success:
|
|
558
|
+
return None
|
|
559
|
+
|
|
560
|
+
if not deserialize:
|
|
561
|
+
return data
|
|
562
|
+
|
|
563
|
+
return TeamSession.from_dict(data)
|
|
564
|
+
|
|
565
|
+
else:
|
|
566
|
+
data = {
|
|
567
|
+
"session_id": session_dict.get("session_id"),
|
|
568
|
+
"session_type": SessionType.WORKFLOW.value,
|
|
569
|
+
"workflow_id": session_dict.get("workflow_id"),
|
|
570
|
+
"user_id": session_dict.get("user_id"),
|
|
571
|
+
"runs": session_dict.get("runs"),
|
|
572
|
+
"workflow_data": session_dict.get("workflow_data"),
|
|
573
|
+
"session_data": session_dict.get("session_data"),
|
|
574
|
+
"metadata": session_dict.get("metadata"),
|
|
575
|
+
"created_at": session_dict.get("created_at") or int(time.time()),
|
|
576
|
+
"updated_at": int(time.time()),
|
|
577
|
+
"agent_id": None,
|
|
578
|
+
"team_id": None,
|
|
579
|
+
"agent_data": None,
|
|
580
|
+
"team_data": None,
|
|
581
|
+
"summary": None,
|
|
582
|
+
}
|
|
583
|
+
|
|
584
|
+
success = self._store_record(
|
|
585
|
+
table_type="sessions",
|
|
586
|
+
record_id=session.session_id,
|
|
587
|
+
data=data,
|
|
588
|
+
index_fields=["user_id", "workflow_id", "session_type"],
|
|
589
|
+
)
|
|
590
|
+
if not success:
|
|
591
|
+
return None
|
|
592
|
+
|
|
593
|
+
if not deserialize:
|
|
594
|
+
return data
|
|
595
|
+
|
|
596
|
+
return WorkflowSession.from_dict(data)
|
|
597
|
+
|
|
598
|
+
except Exception as e:
|
|
599
|
+
log_error(f"Error upserting session: {e}")
|
|
600
|
+
raise e
|
|
601
|
+
|
|
602
|
+
def upsert_sessions(
|
|
603
|
+
self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
|
|
604
|
+
) -> List[Union[Session, Dict[str, Any]]]:
|
|
605
|
+
"""
|
|
606
|
+
Bulk upsert multiple sessions for improved performance on large datasets.
|
|
607
|
+
|
|
608
|
+
Args:
|
|
609
|
+
sessions (List[Session]): List of sessions to upsert.
|
|
610
|
+
deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
|
|
611
|
+
|
|
612
|
+
Returns:
|
|
613
|
+
List[Union[Session, Dict[str, Any]]]: List of upserted sessions.
|
|
614
|
+
|
|
615
|
+
Raises:
|
|
616
|
+
Exception: If an error occurs during bulk upsert.
|
|
617
|
+
"""
|
|
618
|
+
if not sessions:
|
|
619
|
+
return []
|
|
620
|
+
|
|
621
|
+
try:
|
|
622
|
+
log_info(
|
|
623
|
+
f"RedisDb doesn't support efficient bulk operations, falling back to individual upserts for {len(sessions)} sessions"
|
|
624
|
+
)
|
|
625
|
+
|
|
626
|
+
# Fall back to individual upserts
|
|
627
|
+
results = []
|
|
628
|
+
for session in sessions:
|
|
629
|
+
if session is not None:
|
|
630
|
+
result = self.upsert_session(session, deserialize=deserialize)
|
|
631
|
+
if result is not None:
|
|
632
|
+
results.append(result)
|
|
633
|
+
return results
|
|
634
|
+
|
|
635
|
+
except Exception as e:
|
|
636
|
+
log_error(f"Exception during bulk session upsert: {e}")
|
|
637
|
+
return []
|
|
638
|
+
|
|
639
|
+
# -- Memory methods --
|
|
640
|
+
|
|
641
|
+
def delete_user_memory(self, memory_id: str, user_id: Optional[str] = None):
|
|
642
|
+
"""Delete a user memory from Redis.
|
|
643
|
+
|
|
644
|
+
Args:
|
|
645
|
+
memory_id (str): The ID of the memory to delete.
|
|
646
|
+
user_id (Optional[str]): The ID of the user. If provided, verifies the memory belongs to this user before deleting.
|
|
647
|
+
|
|
648
|
+
Returns:
|
|
649
|
+
bool: True if the memory was deleted, False otherwise.
|
|
650
|
+
|
|
651
|
+
Raises:
|
|
652
|
+
Exception: If any error occurs while deleting the memory.
|
|
653
|
+
"""
|
|
654
|
+
try:
|
|
655
|
+
# If user_id is provided, verify ownership before deleting
|
|
656
|
+
if user_id is not None:
|
|
657
|
+
memory = self._get_record("memories", memory_id)
|
|
658
|
+
if memory is None:
|
|
659
|
+
log_debug(f"No user memory found with id: {memory_id}")
|
|
660
|
+
return
|
|
661
|
+
if memory.get("user_id") != user_id:
|
|
662
|
+
log_debug(f"Memory {memory_id} does not belong to user {user_id}")
|
|
663
|
+
return
|
|
664
|
+
|
|
665
|
+
if self._delete_record(
|
|
666
|
+
"memories", memory_id, index_fields=["user_id", "agent_id", "team_id", "workflow_id"]
|
|
667
|
+
):
|
|
668
|
+
log_debug(f"Successfully deleted user memory id: {memory_id}")
|
|
669
|
+
else:
|
|
670
|
+
log_debug(f"No user memory found with id: {memory_id}")
|
|
671
|
+
|
|
672
|
+
except Exception as e:
|
|
673
|
+
log_error(f"Error deleting user memory: {e}")
|
|
674
|
+
raise e
|
|
675
|
+
|
|
676
|
+
def delete_user_memories(self, memory_ids: List[str], user_id: Optional[str] = None) -> None:
|
|
677
|
+
"""Delete user memories from Redis.
|
|
678
|
+
|
|
679
|
+
Args:
|
|
680
|
+
memory_ids (List[str]): The IDs of the memories to delete.
|
|
681
|
+
user_id (Optional[str]): The ID of the user. If provided, only deletes memories belonging to this user.
|
|
682
|
+
"""
|
|
683
|
+
try:
|
|
684
|
+
# TODO: cant we optimize this?
|
|
685
|
+
for memory_id in memory_ids:
|
|
686
|
+
# If user_id is provided, verify ownership before deleting
|
|
687
|
+
if user_id is not None:
|
|
688
|
+
memory = self._get_record("memories", memory_id)
|
|
689
|
+
if memory is None:
|
|
690
|
+
continue
|
|
691
|
+
if memory.get("user_id") != user_id:
|
|
692
|
+
log_debug(f"Memory {memory_id} does not belong to user {user_id}, skipping deletion")
|
|
693
|
+
continue
|
|
694
|
+
|
|
695
|
+
self._delete_record(
|
|
696
|
+
"memories",
|
|
697
|
+
memory_id,
|
|
698
|
+
index_fields=["user_id", "agent_id", "team_id", "workflow_id"],
|
|
699
|
+
)
|
|
700
|
+
|
|
701
|
+
except Exception as e:
|
|
702
|
+
log_error(f"Error deleting user memories: {e}")
|
|
703
|
+
raise e
|
|
704
|
+
|
|
705
|
+
def get_all_memory_topics(self) -> List[str]:
|
|
706
|
+
"""Get all memory topics from Redis.
|
|
707
|
+
|
|
708
|
+
Returns:
|
|
709
|
+
List[str]: The list of memory topics.
|
|
710
|
+
"""
|
|
711
|
+
try:
|
|
712
|
+
all_memories = self._get_all_records("memories")
|
|
713
|
+
|
|
714
|
+
topics = set()
|
|
715
|
+
for memory in all_memories:
|
|
716
|
+
memory_topics = memory.get("topics", [])
|
|
717
|
+
if isinstance(memory_topics, list):
|
|
718
|
+
topics.update(memory_topics)
|
|
719
|
+
|
|
720
|
+
return list(topics)
|
|
721
|
+
|
|
722
|
+
except Exception as e:
|
|
723
|
+
log_error(f"Exception reading memory topics: {e}")
|
|
724
|
+
raise e
|
|
725
|
+
|
|
726
|
+
def get_user_memory(
|
|
727
|
+
self, memory_id: str, deserialize: Optional[bool] = True, user_id: Optional[str] = None
|
|
728
|
+
) -> Optional[Union[UserMemory, Dict[str, Any]]]:
|
|
729
|
+
"""Get a memory from Redis.
|
|
730
|
+
|
|
731
|
+
Args:
|
|
732
|
+
memory_id (str): The ID of the memory to get.
|
|
733
|
+
deserialize (Optional[bool]): Whether to deserialize the memory. Defaults to True.
|
|
734
|
+
user_id (Optional[str]): The ID of the user. If provided, only returns the memory if it belongs to this user.
|
|
735
|
+
|
|
736
|
+
Returns:
|
|
737
|
+
Optional[UserMemory]: The memory data if found, None otherwise.
|
|
738
|
+
"""
|
|
739
|
+
try:
|
|
740
|
+
memory_raw = self._get_record("memories", memory_id)
|
|
741
|
+
if memory_raw is None:
|
|
742
|
+
return None
|
|
743
|
+
|
|
744
|
+
# Filter by user_id if provided
|
|
745
|
+
if user_id is not None and memory_raw.get("user_id") != user_id:
|
|
746
|
+
return None
|
|
747
|
+
|
|
748
|
+
if not deserialize:
|
|
749
|
+
return memory_raw
|
|
750
|
+
|
|
751
|
+
return UserMemory.from_dict(memory_raw)
|
|
752
|
+
|
|
753
|
+
except Exception as e:
|
|
754
|
+
log_error(f"Exception reading memory: {e}")
|
|
755
|
+
raise e
|
|
756
|
+
|
|
757
|
+
def get_user_memories(
|
|
758
|
+
self,
|
|
759
|
+
user_id: Optional[str] = None,
|
|
760
|
+
agent_id: Optional[str] = None,
|
|
761
|
+
team_id: Optional[str] = None,
|
|
762
|
+
topics: Optional[List[str]] = None,
|
|
763
|
+
search_content: Optional[str] = None,
|
|
764
|
+
limit: Optional[int] = None,
|
|
765
|
+
page: Optional[int] = None,
|
|
766
|
+
sort_by: Optional[str] = None,
|
|
767
|
+
sort_order: Optional[str] = None,
|
|
768
|
+
deserialize: Optional[bool] = True,
|
|
769
|
+
) -> Union[List[UserMemory], Tuple[List[Dict[str, Any]], int]]:
|
|
770
|
+
"""Get all memories from Redis as UserMemory objects.
|
|
771
|
+
|
|
772
|
+
Args:
|
|
773
|
+
user_id (Optional[str]): The ID of the user to filter by.
|
|
774
|
+
agent_id (Optional[str]): The ID of the agent to filter by.
|
|
775
|
+
team_id (Optional[str]): The ID of the team to filter by.
|
|
776
|
+
topics (Optional[List[str]]): The topics to filter by.
|
|
777
|
+
search_content (Optional[str]): The content to search for.
|
|
778
|
+
limit (Optional[int]): The maximum number of memories to return.
|
|
779
|
+
page (Optional[int]): The page number to return.
|
|
780
|
+
sort_by (Optional[str]): The field to sort by.
|
|
781
|
+
sort_order (Optional[str]): The order to sort by.
|
|
782
|
+
deserialize (Optional[bool]): Whether to deserialize the memories.
|
|
783
|
+
|
|
784
|
+
Returns:
|
|
785
|
+
Union[List[UserMemory], Tuple[List[Dict[str, Any]], int]]:
|
|
786
|
+
- When deserialize=True: List of UserMemory objects
|
|
787
|
+
- When deserialize=False: Tuple of (memory dictionaries, total count)
|
|
788
|
+
|
|
789
|
+
Raises:
|
|
790
|
+
Exception: If any error occurs while reading the memories.
|
|
791
|
+
"""
|
|
792
|
+
try:
|
|
793
|
+
all_memories = self._get_all_records("memories")
|
|
794
|
+
|
|
795
|
+
# Apply filters
|
|
796
|
+
conditions = {}
|
|
797
|
+
if user_id is not None:
|
|
798
|
+
conditions["user_id"] = user_id
|
|
799
|
+
if agent_id is not None:
|
|
800
|
+
conditions["agent_id"] = agent_id
|
|
801
|
+
if team_id is not None:
|
|
802
|
+
conditions["team_id"] = team_id
|
|
803
|
+
|
|
804
|
+
filtered_memories = apply_filters(records=all_memories, conditions=conditions)
|
|
805
|
+
|
|
806
|
+
# Apply topic filter
|
|
807
|
+
if topics is not None:
|
|
808
|
+
filtered_memories = [
|
|
809
|
+
m for m in filtered_memories if any(topic in m.get("topics", []) for topic in topics)
|
|
810
|
+
]
|
|
811
|
+
|
|
812
|
+
# Apply content search
|
|
813
|
+
if search_content is not None:
|
|
814
|
+
filtered_memories = [
|
|
815
|
+
m for m in filtered_memories if search_content.lower() in str(m.get("memory", "")).lower()
|
|
816
|
+
]
|
|
817
|
+
|
|
818
|
+
sorted_memories = apply_sorting(records=filtered_memories, sort_by=sort_by, sort_order=sort_order)
|
|
819
|
+
paginated_memories = apply_pagination(records=sorted_memories, limit=limit, page=page)
|
|
820
|
+
|
|
821
|
+
if not deserialize:
|
|
822
|
+
return paginated_memories, len(filtered_memories)
|
|
823
|
+
|
|
824
|
+
return [UserMemory.from_dict(record) for record in paginated_memories]
|
|
825
|
+
|
|
826
|
+
except Exception as e:
|
|
827
|
+
log_error(f"Exception reading memories: {e}")
|
|
828
|
+
raise e
|
|
829
|
+
|
|
830
|
+
    def get_user_memory_stats(
        self,
        limit: Optional[int] = None,
        page: Optional[int] = None,
    ) -> Tuple[List[Dict[str, Any]], int]:
        """Get user memory stats from Redis.

        Args:
            limit (Optional[int]): The maximum number of stats to return.
            page (Optional[int]): The page number to return.

        Returns:
            Tuple[List[Dict[str, Any]], int]: A tuple containing the list of stats and the total number of stats.

        Raises:
            Exception: If any error occurs while getting the user memory stats.
        """
        try:
            all_memories = self._get_all_records("memories")

            # Group by user_id
            user_stats = {}
            for memory in all_memories:
                memory_user_id = memory.get("user_id")
                if memory_user_id is None:
                    continue

                if memory_user_id not in user_stats:
                    user_stats[memory_user_id] = {
                        "user_id": memory_user_id,
                        "total_memories": 0,
                        "last_memory_updated_at": 0,
                    }

                user_stats[memory_user_id]["total_memories"] += 1
                updated_at = memory.get("updated_at", 0)
                if updated_at > user_stats[memory_user_id]["last_memory_updated_at"]:
                    user_stats[memory_user_id]["last_memory_updated_at"] = updated_at

            stats_list = list(user_stats.values())

            # Sorting by last_memory_updated_at descending
            stats_list.sort(key=lambda x: x["last_memory_updated_at"], reverse=True)

            total_count = len(stats_list)

            paginated_stats = apply_pagination(records=stats_list, limit=limit, page=page)

            return paginated_stats, total_count

        except Exception as e:
            log_error(f"Exception getting user memory stats: {e}")
            raise e

    def upsert_user_memory(
        self, memory: UserMemory, deserialize: Optional[bool] = True
    ) -> Optional[Union[UserMemory, Dict[str, Any]]]:
        """Upsert a user memory in Redis.

        Args:
            memory (UserMemory): The memory to upsert.
            deserialize (Optional[bool]): Whether to deserialize the result. Defaults to True.

        Returns:
            Optional[Union[UserMemory, Dict[str, Any]]]: The upserted memory data if successful, None otherwise.
        """
        try:
            if memory.memory_id is None:
                memory.memory_id = str(uuid4())

            data = {
                "user_id": memory.user_id,
                "agent_id": memory.agent_id,
                "team_id": memory.team_id,
                "memory_id": memory.memory_id,
                "memory": memory.memory,
                "topics": memory.topics,
                "updated_at": int(time.time()),
            }

            success = self._store_record(
                "memories", memory.memory_id, data, index_fields=["user_id", "agent_id", "team_id", "workflow_id"]
            )

            if not success:
                return None

            if not deserialize:
                return data

            return UserMemory.from_dict(data)

        except Exception as e:
            log_error(f"Error upserting user memory: {e}")
            raise e

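    # Illustrative usage sketch (not part of the original source): upserting a memory and reading
    # it back, assuming `db` is a configured RedisDb instance and that UserMemory is importable
    # from agno.db.schemas.memory (import path assumed from this package layout).
    #
    #     saved = db.upsert_user_memory(UserMemory(user_id="user_1", memory="Prefers window seats"))
    #     fetched = db.get_user_memories(user_id="user_1")
    #
    # upsert_user_memory generates a memory_id via uuid4() when none is provided.
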
    def upsert_memories(
        self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
    ) -> List[Union[UserMemory, Dict[str, Any]]]:
        """
        Bulk upsert multiple user memories for improved performance on large datasets.

        Args:
            memories (List[UserMemory]): List of memories to upsert.
            deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.
            preserve_updated_at (bool): Whether to preserve existing updated_at timestamps. Defaults to False.

        Returns:
            List[Union[UserMemory, Dict[str, Any]]]: List of upserted memories. An empty list is
            returned if an error occurs during the bulk upsert.
        """
        if not memories:
            return []

        try:
            log_info(
                f"RedisDb doesn't support efficient bulk operations, falling back to individual upserts for {len(memories)} memories"
            )

            # Fall back to individual upserts
            results = []
            for memory in memories:
                if memory is not None:
                    result = self.upsert_user_memory(memory, deserialize=deserialize)
                    if result is not None:
                        results.append(result)
            return results

        except Exception as e:
            log_error(f"Exception during bulk memory upsert: {e}")
            return []

    def clear_memories(self) -> None:
        """Delete all memories from the database.

        Raises:
            Exception: If an error occurs during deletion.
        """
        try:
            # Get all keys for memories table
            keys = get_all_keys_for_table(redis_client=self.redis_client, prefix=self.db_prefix, table_type="memories")

            if keys:
                # Delete all memory keys in a single batch operation
                self.redis_client.delete(*keys)

        except Exception as e:
            log_error(f"Exception deleting all memories: {e}")
            raise e

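    # Illustrative usage sketch (not part of the original source): bulk upsert and cleanup,
    # assuming `db` is a configured RedisDb instance. Note that upsert_memories falls back to
    # per-record upserts internally rather than a pipelined write.
    #
    #     results = db.upsert_memories([memory_a, memory_b])
    #     db.clear_memories()  # deletes every key under the "memories" table prefix
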
    # -- Metrics methods --

    def _get_all_sessions_for_metrics_calculation(
        self, start_timestamp: Optional[int] = None, end_timestamp: Optional[int] = None
    ) -> List[Dict[str, Any]]:
        """Get all sessions for metrics calculation.

        Args:
            start_timestamp (Optional[int]): The start timestamp to filter by.
            end_timestamp (Optional[int]): The end timestamp to filter by.

        Returns:
            List[Dict[str, Any]]: The list of sessions.

        Raises:
            Exception: If any error occurs while getting the sessions.
        """
        try:
            all_sessions = self._get_all_records("sessions")

            # Filter by timestamp if provided
            if start_timestamp is not None or end_timestamp is not None:
                filtered_sessions = []
                for session in all_sessions:
                    created_at = session.get("created_at", 0)
                    if start_timestamp is not None and created_at < start_timestamp:
                        continue
                    if end_timestamp is not None and created_at > end_timestamp:
                        continue
                    filtered_sessions.append(session)
                return filtered_sessions

            return all_sessions

        except Exception as e:
            log_error(f"Error reading sessions for metrics: {e}")
            raise e

    def _get_metrics_calculation_starting_date(self) -> Optional[date]:
        """Get the first date for which metrics calculation is needed.

        Returns:
            Optional[date]: The first date for which metrics calculation is needed.

        Raises:
            Exception: If any error occurs while getting the metrics calculation starting date.
        """
        try:
            all_metrics = self._get_all_records("metrics")

            if all_metrics:
                # Find the latest completed metric
                completed_metrics = [m for m in all_metrics if m.get("completed", False)]
                if completed_metrics:
                    latest_completed = max(completed_metrics, key=lambda x: x.get("date", ""))
                    return datetime.fromisoformat(latest_completed["date"]).date() + timedelta(days=1)
                else:
                    # Find the earliest incomplete metric
                    incomplete_metrics = [m for m in all_metrics if not m.get("completed", False)]
                    if incomplete_metrics:
                        earliest_incomplete = min(incomplete_metrics, key=lambda x: x.get("date", ""))
                        return datetime.fromisoformat(earliest_incomplete["date"]).date()

            # No metrics records, find first session
            sessions_raw, _ = self.get_sessions(sort_by="created_at", sort_order="asc", limit=1, deserialize=False)
            if sessions_raw:
                first_session_date = sessions_raw[0]["created_at"]  # type: ignore
                return datetime.fromtimestamp(first_session_date, tz=timezone.utc).date()

            return None

        except Exception as e:
            log_error(f"Error getting metrics starting date: {e}")
            raise e

    def calculate_metrics(self) -> Optional[list[dict]]:
        """Calculate metrics for all dates without complete metrics.

        Returns:
            Optional[list[dict]]: The list of metrics.

        Raises:
            Exception: If any error occurs while calculating the metrics.
        """
        try:
            starting_date = self._get_metrics_calculation_starting_date()
            if starting_date is None:
                log_info("No session data found. Won't calculate metrics.")
                return None

            dates_to_process = get_dates_to_calculate_metrics_for(starting_date)
            if not dates_to_process:
                log_info("Metrics already calculated for all relevant dates.")
                return None

            start_timestamp = int(datetime.combine(dates_to_process[0], datetime.min.time()).timestamp())
            end_timestamp = int(
                datetime.combine(dates_to_process[-1] + timedelta(days=1), datetime.min.time()).timestamp()
            )

            sessions = self._get_all_sessions_for_metrics_calculation(
                start_timestamp=start_timestamp, end_timestamp=end_timestamp
            )
            all_sessions_data = fetch_all_sessions_data(
                sessions=sessions, dates_to_process=dates_to_process, start_timestamp=start_timestamp
            )
            if not all_sessions_data:
                log_info("No new session data found. Won't calculate metrics.")
                return None

            results = []
            for date_to_process in dates_to_process:
                date_key = date_to_process.isoformat()
                sessions_for_date = all_sessions_data.get(date_key, {})

                # Skip dates with no sessions
                if not any(len(sessions) > 0 for sessions in sessions_for_date.values()):
                    continue

                metrics_record = calculate_date_metrics(date_to_process, sessions_for_date)

                # Check if a record already exists for this date and aggregation period
                existing_record = self._get_record("metrics", metrics_record["id"])
                if existing_record:
                    # Update the existing record while preserving created_at
                    metrics_record["created_at"] = existing_record.get("created_at", metrics_record["created_at"])

                success = self._store_record("metrics", metrics_record["id"], metrics_record)
                if success:
                    results.append(metrics_record)

            log_debug("Updated metrics calculations")

            return results

        except Exception as e:
            log_error(f"Error calculating metrics: {e}")
            raise e

    def get_metrics(
        self,
        starting_date: Optional[date] = None,
        ending_date: Optional[date] = None,
    ) -> Tuple[List[dict], Optional[int]]:
        """Get all metrics matching the given date range.

        Args:
            starting_date (Optional[date]): The starting date to filter by.
            ending_date (Optional[date]): The ending date to filter by.

        Returns:
            Tuple[List[dict], Optional[int]]: A tuple containing the list of metrics and the latest updated_at.

        Raises:
            Exception: If any error occurs while getting the metrics.
        """
        try:
            all_metrics = self._get_all_records("metrics")

            # Filter by date range
            if starting_date is not None or ending_date is not None:
                filtered_metrics = []
                for metric in all_metrics:
                    metric_date = datetime.fromisoformat(metric.get("date", "")).date()
                    if starting_date is not None and metric_date < starting_date:
                        continue
                    if ending_date is not None and metric_date > ending_date:
                        continue
                    filtered_metrics.append(metric)
                all_metrics = filtered_metrics

            # Get latest updated_at
            latest_updated_at = None
            if all_metrics:
                latest_updated_at = max(metric.get("updated_at", 0) for metric in all_metrics)

            return all_metrics, latest_updated_at

        except Exception as e:
            log_error(f"Error getting metrics: {e}")
            raise e

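    # Illustrative usage sketch (not part of the original source): refreshing and reading metrics,
    # assuming `db` is a configured RedisDb instance and `date` is datetime.date.
    #
    #     db.calculate_metrics()  # fills in daily metrics for any dates not yet completed
    #     rows, latest_updated_at = db.get_metrics(starting_date=date(2025, 1, 1))
    #
    # calculate_metrics() derives its starting date from the latest completed metrics record,
    # or from the earliest stored session when no metrics records exist yet.
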
    # -- Knowledge methods --

    def delete_knowledge_content(self, id: str):
        """Delete a knowledge row from the database.

        Args:
            id (str): The ID of the knowledge row to delete.

        Raises:
            Exception: If any error occurs while deleting the knowledge content.
        """
        try:
            self._delete_record("knowledge", id)

        except Exception as e:
            log_error(f"Error deleting knowledge content: {e}")
            raise e

    def get_knowledge_content(self, id: str) -> Optional[KnowledgeRow]:
        """Get a knowledge row from the database.

        Args:
            id (str): The ID of the knowledge row to get.

        Returns:
            Optional[KnowledgeRow]: The knowledge row, or None if it doesn't exist.

        Raises:
            Exception: If any error occurs while getting the knowledge content.
        """
        try:
            document_raw = self._get_record("knowledge", id)
            if document_raw is None:
                return None

            return KnowledgeRow.model_validate(document_raw)

        except Exception as e:
            log_error(f"Error getting knowledge content: {e}")
            raise e

    def get_knowledge_contents(
        self,
        limit: Optional[int] = None,
        page: Optional[int] = None,
        sort_by: Optional[str] = None,
        sort_order: Optional[str] = None,
    ) -> Tuple[List[KnowledgeRow], int]:
        """Get all knowledge contents from the database.

        Args:
            limit (Optional[int]): The maximum number of knowledge contents to return.
            page (Optional[int]): The page number.
            sort_by (Optional[str]): The column to sort by.
            sort_order (Optional[str]): The order to sort by.

        Returns:
            Tuple[List[KnowledgeRow], int]: The knowledge contents and total count.

        Raises:
            Exception: If any error occurs while getting the knowledge contents.
        """
        try:
            all_documents = self._get_all_records("knowledge")
            if len(all_documents) == 0:
                return [], 0

            total_count = len(all_documents)

            # Apply sorting
            sorted_documents = apply_sorting(records=all_documents, sort_by=sort_by, sort_order=sort_order)

            # Apply pagination
            paginated_documents = apply_pagination(records=sorted_documents, limit=limit, page=page)

            return [KnowledgeRow.model_validate(doc) for doc in paginated_documents], total_count

        except Exception as e:
            log_error(f"Error getting knowledge contents: {e}")
            raise e

    def upsert_knowledge_content(self, knowledge_row: KnowledgeRow):
        """Upsert knowledge content in the database.

        Args:
            knowledge_row (KnowledgeRow): The knowledge row to upsert.

        Returns:
            Optional[KnowledgeRow]: The upserted knowledge row, or None if the operation fails.

        Raises:
            Exception: If any error occurs while upserting the knowledge content.
        """
        try:
            data = knowledge_row.model_dump()
            success = self._store_record("knowledge", knowledge_row.id, data)  # type: ignore

            return knowledge_row if success else None

        except Exception as e:
            log_error(f"Error upserting knowledge content: {e}")
            raise e

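    # Illustrative usage sketch (not part of the original source): knowledge rows are stored as
    # plain dicts produced by KnowledgeRow.model_dump(). Assuming `db` is a configured RedisDb
    # instance and `row` is a KnowledgeRow:
    #
    #     db.upsert_knowledge_content(row)
    #     rows, total = db.get_knowledge_contents(limit=20, page=1)
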
    # -- Eval methods --

    def create_eval_run(self, eval_run: EvalRunRecord) -> Optional[EvalRunRecord]:
        """Create an EvalRunRecord in Redis.

        Args:
            eval_run (EvalRunRecord): The eval run to create.

        Returns:
            Optional[EvalRunRecord]: The created eval run if successful, None otherwise.

        Raises:
            Exception: If any error occurs while creating the eval run.
        """
        try:
            current_time = int(time.time())
            data = {"created_at": current_time, "updated_at": current_time, **eval_run.model_dump()}

            success = self._store_record(
                "evals",
                eval_run.run_id,
                data,
                index_fields=["agent_id", "team_id", "workflow_id", "model_id", "eval_type"],
            )

            log_debug(f"Created eval run with id '{eval_run.run_id}'")

            return eval_run if success else None

        except Exception as e:
            log_error(f"Error creating eval run: {e}")
            raise e

    def delete_eval_run(self, eval_run_id: str) -> None:
        """Delete an eval run from Redis.

        Args:
            eval_run_id (str): The ID of the eval run to delete.

        Raises:
            Exception: If any error occurs while deleting the eval run.
        """
        try:
            if self._delete_record(
                "evals", eval_run_id, index_fields=["agent_id", "team_id", "workflow_id", "model_id", "eval_type"]
            ):
                log_debug(f"Deleted eval run with ID: {eval_run_id}")
            else:
                log_debug(f"No eval run found with ID: {eval_run_id}")

        except Exception as e:
            log_error(f"Error deleting eval run {eval_run_id}: {e}")
            raise

    def delete_eval_runs(self, eval_run_ids: List[str]) -> None:
        """Delete multiple eval runs from Redis.

        Args:
            eval_run_ids (List[str]): The IDs of the eval runs to delete.

        Raises:
            Exception: If any error occurs while deleting the eval runs.
        """
        try:
            deleted_count = 0
            for eval_run_id in eval_run_ids:
                if self._delete_record(
                    "evals", eval_run_id, index_fields=["agent_id", "team_id", "workflow_id", "model_id", "eval_type"]
                ):
                    deleted_count += 1

            if deleted_count == 0:
                log_debug(f"No eval runs found with IDs: {eval_run_ids}")
            else:
                log_debug(f"Deleted {deleted_count} eval runs")

        except Exception as e:
            log_error(f"Error deleting eval runs {eval_run_ids}: {e}")
            raise

    def get_eval_run(
        self, eval_run_id: str, deserialize: Optional[bool] = True
    ) -> Optional[Union[EvalRunRecord, Dict[str, Any]]]:
        """Get an eval run from Redis.

        Args:
            eval_run_id (str): The ID of the eval run to get.
            deserialize (Optional[bool]): Whether to deserialize the eval run. Defaults to True.

        Returns:
            Optional[Union[EvalRunRecord, Dict[str, Any]]]: The eval run if found, None otherwise.

        Raises:
            Exception: If any error occurs while getting the eval run.
        """
        try:
            eval_run_raw = self._get_record("evals", eval_run_id)
            if eval_run_raw is None:
                return None

            if not deserialize:
                return eval_run_raw

            return EvalRunRecord.model_validate(eval_run_raw)

        except Exception as e:
            log_error(f"Exception getting eval run {eval_run_id}: {e}")
            raise e

    def get_eval_runs(
        self,
        limit: Optional[int] = None,
        page: Optional[int] = None,
        sort_by: Optional[str] = None,
        sort_order: Optional[str] = None,
        agent_id: Optional[str] = None,
        team_id: Optional[str] = None,
        workflow_id: Optional[str] = None,
        model_id: Optional[str] = None,
        filter_type: Optional[EvalFilterType] = None,
        eval_type: Optional[List[EvalType]] = None,
        deserialize: Optional[bool] = True,
    ) -> Union[List[EvalRunRecord], Tuple[List[Dict[str, Any]], int]]:
        """Get all eval runs from Redis.

        Args:
            limit (Optional[int]): The maximum number of eval runs to return.
            page (Optional[int]): The page number to return.
            sort_by (Optional[str]): The field to sort by.
            sort_order (Optional[str]): The order to sort by.
            agent_id (Optional[str]): Filter by agent ID.
            team_id (Optional[str]): Filter by team ID.
            workflow_id (Optional[str]): Filter by workflow ID.
            model_id (Optional[str]): Filter by model ID.
            filter_type (Optional[EvalFilterType]): Filter by component type (agent, team, or workflow).
            eval_type (Optional[List[EvalType]]): Filter by eval types.
            deserialize (Optional[bool]): Whether to deserialize the eval runs. Defaults to True.

        Returns:
            Union[List[EvalRunRecord], Tuple[List[Dict[str, Any]], int]]:
                - When deserialize=True: List of EvalRunRecord objects
                - When deserialize=False: Tuple of (eval run dictionaries, total count)

        Raises:
            Exception: If any error occurs while getting the eval runs.
        """
        try:
            all_eval_runs = self._get_all_records("evals")

            # Apply filters
            filtered_runs = []
            for run in all_eval_runs:
                # Agent/team/workflow filters
                if agent_id is not None and run.get("agent_id") != agent_id:
                    continue
                if team_id is not None and run.get("team_id") != team_id:
                    continue
                if workflow_id is not None and run.get("workflow_id") != workflow_id:
                    continue
                if model_id is not None and run.get("model_id") != model_id:
                    continue

                # Eval type filter
                if eval_type is not None and len(eval_type) > 0:
                    if run.get("eval_type") not in eval_type:
                        continue

                # Filter type
                if filter_type is not None:
                    if filter_type == EvalFilterType.AGENT and run.get("agent_id") is None:
                        continue
                    elif filter_type == EvalFilterType.TEAM and run.get("team_id") is None:
                        continue
                    elif filter_type == EvalFilterType.WORKFLOW and run.get("workflow_id") is None:
                        continue

                filtered_runs.append(run)

            if sort_by is None:
                sort_by = "created_at"
                sort_order = "desc"

            sorted_runs = apply_sorting(records=filtered_runs, sort_by=sort_by, sort_order=sort_order)
            paginated_runs = apply_pagination(records=sorted_runs, limit=limit, page=page)

            if not deserialize:
                return paginated_runs, len(filtered_runs)

            return [EvalRunRecord.model_validate(row) for row in paginated_runs]

        except Exception as e:
            log_error(f"Exception getting eval runs: {e}")
            raise e

    def rename_eval_run(
        self, eval_run_id: str, name: str, deserialize: Optional[bool] = True
    ) -> Optional[Union[EvalRunRecord, Dict[str, Any]]]:
        """Update the name of an eval run in Redis.

        Args:
            eval_run_id (str): The ID of the eval run to rename.
            name (str): The new name of the eval run.
            deserialize (Optional[bool]): Whether to deserialize the updated eval run. Defaults to True.

        Returns:
            Optional[Union[EvalRunRecord, Dict[str, Any]]]: The updated eval run data if successful, None otherwise.

        Raises:
            Exception: If any error occurs while updating the eval run name.
        """
        try:
            eval_run_data = self._get_record("evals", eval_run_id)
            if eval_run_data is None:
                return None

            eval_run_data["name"] = name
            eval_run_data["updated_at"] = int(time.time())

            success = self._store_record("evals", eval_run_id, eval_run_data)
            if not success:
                return None

            log_debug(f"Renamed eval run with id '{eval_run_id}' to '{name}'")

            if not deserialize:
                return eval_run_data

            return EvalRunRecord.model_validate(eval_run_data)

        except Exception as e:
            log_error(f"Error updating eval run name {eval_run_id}: {e}")
            raise

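    # Illustrative usage sketch (not part of the original source): recording and querying eval
    # runs, assuming `db` is a configured RedisDb instance and `record` is an EvalRunRecord.
    #
    #     db.create_eval_run(record)
    #     agent_runs = db.get_eval_runs(agent_id="my-agent", filter_type=EvalFilterType.AGENT)
    #     db.rename_eval_run(record.run_id, "baseline-run")
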
    # -- Cultural Knowledge methods --

    def clear_cultural_knowledge(self) -> None:
        """Delete all cultural knowledge from the database.

        Raises:
            Exception: If an error occurs during deletion.
        """
        try:
            keys = get_all_keys_for_table(redis_client=self.redis_client, prefix=self.db_prefix, table_type="culture")

            if keys:
                self.redis_client.delete(*keys)

        except Exception as e:
            log_error(f"Exception deleting all cultural knowledge: {e}")
            raise e

    def delete_cultural_knowledge(self, id: str) -> None:
        """Delete cultural knowledge by ID.

        Args:
            id (str): The ID of the cultural knowledge to delete.

        Raises:
            Exception: If an error occurs during deletion.
        """
        try:
            if self._delete_record("culture", id, index_fields=["name", "agent_id", "team_id"]):
                log_debug(f"Successfully deleted cultural knowledge id: {id}")
            else:
                log_debug(f"No cultural knowledge found with id: {id}")

        except Exception as e:
            log_error(f"Error deleting cultural knowledge: {e}")
            raise e

    def get_cultural_knowledge(
        self, id: str, deserialize: Optional[bool] = True
    ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
        """Get cultural knowledge by ID.

        Args:
            id (str): The ID of the cultural knowledge to retrieve.
            deserialize (Optional[bool]): Whether to deserialize to CulturalKnowledge object. Defaults to True.

        Returns:
            Optional[Union[CulturalKnowledge, Dict[str, Any]]]: The cultural knowledge if found, None otherwise.

        Raises:
            Exception: If an error occurs during retrieval.
        """
        try:
            cultural_knowledge = self._get_record("culture", id)

            if cultural_knowledge is None:
                return None

            if not deserialize:
                return cultural_knowledge

            return deserialize_cultural_knowledge_from_db(cultural_knowledge)

        except Exception as e:
            log_error(f"Error getting cultural knowledge: {e}")
            raise e

    def get_all_cultural_knowledge(
        self,
        agent_id: Optional[str] = None,
        team_id: Optional[str] = None,
        name: Optional[str] = None,
        limit: Optional[int] = None,
        page: Optional[int] = None,
        sort_by: Optional[str] = None,
        sort_order: Optional[str] = None,
        deserialize: Optional[bool] = True,
    ) -> Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
        """Get all cultural knowledge with filtering and pagination.

        Args:
            agent_id (Optional[str]): Filter by agent ID.
            team_id (Optional[str]): Filter by team ID.
            name (Optional[str]): Filter by name (case-insensitive partial match).
            limit (Optional[int]): Maximum number of results to return.
            page (Optional[int]): Page number for pagination.
            sort_by (Optional[str]): Field to sort by.
            sort_order (Optional[str]): Sort order ('asc' or 'desc').
            deserialize (Optional[bool]): Whether to deserialize to CulturalKnowledge objects. Defaults to True.

        Returns:
            Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
                - When deserialize=True: List of CulturalKnowledge objects
                - When deserialize=False: Tuple with list of dictionaries and total count

        Raises:
            Exception: If an error occurs during retrieval.
        """
        try:
            all_cultural_knowledge = self._get_all_records("culture")

            # Apply filters
            filtered_items = []
            for item in all_cultural_knowledge:
                if agent_id is not None and item.get("agent_id") != agent_id:
                    continue
                if team_id is not None and item.get("team_id") != team_id:
                    continue
                if name is not None and name.lower() not in item.get("name", "").lower():
                    continue

                filtered_items.append(item)

            sorted_items = apply_sorting(records=filtered_items, sort_by=sort_by, sort_order=sort_order)
            paginated_items = apply_pagination(records=sorted_items, limit=limit, page=page)

            if not deserialize:
                return paginated_items, len(filtered_items)

            return [deserialize_cultural_knowledge_from_db(item) for item in paginated_items]

        except Exception as e:
            log_error(f"Error getting all cultural knowledge: {e}")
            raise e

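    # Illustrative usage sketch (not part of the original source): listing cultural knowledge
    # with filters, assuming `db` is a configured RedisDb instance.
    #
    #     items = db.get_all_cultural_knowledge(agent_id="my-agent", name="greeting", limit=5)
    #     raw_items, total = db.get_all_cultural_knowledge(deserialize=False)
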
    def upsert_cultural_knowledge(
        self, cultural_knowledge: CulturalKnowledge, deserialize: Optional[bool] = True
    ) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
        """Upsert cultural knowledge in Redis.

        Args:
            cultural_knowledge (CulturalKnowledge): The cultural knowledge to upsert.
            deserialize (Optional[bool]): Whether to deserialize the result. Defaults to True.

        Returns:
            Optional[Union[CulturalKnowledge, Dict[str, Any]]]: The upserted cultural knowledge.

        Raises:
            Exception: If an error occurs during upsert.
        """
        try:
            # Serialize content, categories, and notes into a dict for DB storage
            content_dict = serialize_cultural_knowledge_for_db(cultural_knowledge)
            item_id = cultural_knowledge.id or str(uuid4())

            # Create the item dict with serialized content
            data = {
                "id": item_id,
                "name": cultural_knowledge.name,
                "summary": cultural_knowledge.summary,
                "content": content_dict if content_dict else None,
                "metadata": cultural_knowledge.metadata,
                "input": cultural_knowledge.input,
                "created_at": cultural_knowledge.created_at,
                "updated_at": int(time.time()),
                "agent_id": cultural_knowledge.agent_id,
                "team_id": cultural_knowledge.team_id,
            }

            success = self._store_record("culture", item_id, data, index_fields=["name", "agent_id", "team_id"])

            if not success:
                return None

            if not deserialize:
                return data

            return deserialize_cultural_knowledge_from_db(data)

        except Exception as e:
            log_error(f"Error upserting cultural knowledge: {e}")
            raise e