agno 0.1.2__py3-none-any.whl → 2.3.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/__init__.py +8 -0
- agno/agent/__init__.py +44 -5
- agno/agent/agent.py +10531 -2975
- agno/api/agent.py +14 -53
- agno/api/api.py +7 -46
- agno/api/evals.py +22 -0
- agno/api/os.py +17 -0
- agno/api/routes.py +6 -25
- agno/api/schemas/__init__.py +9 -0
- agno/api/schemas/agent.py +6 -9
- agno/api/schemas/evals.py +16 -0
- agno/api/schemas/os.py +14 -0
- agno/api/schemas/team.py +10 -10
- agno/api/schemas/utils.py +21 -0
- agno/api/schemas/workflows.py +16 -0
- agno/api/settings.py +53 -0
- agno/api/team.py +22 -26
- agno/api/workflow.py +28 -0
- agno/cloud/aws/base.py +214 -0
- agno/cloud/aws/s3/__init__.py +2 -0
- agno/cloud/aws/s3/api_client.py +43 -0
- agno/cloud/aws/s3/bucket.py +195 -0
- agno/cloud/aws/s3/object.py +57 -0
- agno/compression/__init__.py +3 -0
- agno/compression/manager.py +247 -0
- agno/culture/__init__.py +3 -0
- agno/culture/manager.py +956 -0
- agno/db/__init__.py +24 -0
- agno/db/async_postgres/__init__.py +3 -0
- agno/db/base.py +946 -0
- agno/db/dynamo/__init__.py +3 -0
- agno/db/dynamo/dynamo.py +2781 -0
- agno/db/dynamo/schemas.py +442 -0
- agno/db/dynamo/utils.py +743 -0
- agno/db/firestore/__init__.py +3 -0
- agno/db/firestore/firestore.py +2379 -0
- agno/db/firestore/schemas.py +181 -0
- agno/db/firestore/utils.py +376 -0
- agno/db/gcs_json/__init__.py +3 -0
- agno/db/gcs_json/gcs_json_db.py +1791 -0
- agno/db/gcs_json/utils.py +228 -0
- agno/db/in_memory/__init__.py +3 -0
- agno/db/in_memory/in_memory_db.py +1312 -0
- agno/db/in_memory/utils.py +230 -0
- agno/db/json/__init__.py +3 -0
- agno/db/json/json_db.py +1777 -0
- agno/db/json/utils.py +230 -0
- agno/db/migrations/manager.py +199 -0
- agno/db/migrations/v1_to_v2.py +635 -0
- agno/db/migrations/versions/v2_3_0.py +938 -0
- agno/db/mongo/__init__.py +17 -0
- agno/db/mongo/async_mongo.py +2760 -0
- agno/db/mongo/mongo.py +2597 -0
- agno/db/mongo/schemas.py +119 -0
- agno/db/mongo/utils.py +276 -0
- agno/db/mysql/__init__.py +4 -0
- agno/db/mysql/async_mysql.py +2912 -0
- agno/db/mysql/mysql.py +2923 -0
- agno/db/mysql/schemas.py +186 -0
- agno/db/mysql/utils.py +488 -0
- agno/db/postgres/__init__.py +4 -0
- agno/db/postgres/async_postgres.py +2579 -0
- agno/db/postgres/postgres.py +2870 -0
- agno/db/postgres/schemas.py +187 -0
- agno/db/postgres/utils.py +442 -0
- agno/db/redis/__init__.py +3 -0
- agno/db/redis/redis.py +2141 -0
- agno/db/redis/schemas.py +159 -0
- agno/db/redis/utils.py +346 -0
- agno/db/schemas/__init__.py +4 -0
- agno/db/schemas/culture.py +120 -0
- agno/db/schemas/evals.py +34 -0
- agno/db/schemas/knowledge.py +40 -0
- agno/db/schemas/memory.py +61 -0
- agno/db/singlestore/__init__.py +3 -0
- agno/db/singlestore/schemas.py +179 -0
- agno/db/singlestore/singlestore.py +2877 -0
- agno/db/singlestore/utils.py +384 -0
- agno/db/sqlite/__init__.py +4 -0
- agno/db/sqlite/async_sqlite.py +2911 -0
- agno/db/sqlite/schemas.py +181 -0
- agno/db/sqlite/sqlite.py +2908 -0
- agno/db/sqlite/utils.py +429 -0
- agno/db/surrealdb/__init__.py +3 -0
- agno/db/surrealdb/metrics.py +292 -0
- agno/db/surrealdb/models.py +334 -0
- agno/db/surrealdb/queries.py +71 -0
- agno/db/surrealdb/surrealdb.py +1908 -0
- agno/db/surrealdb/utils.py +147 -0
- agno/db/utils.py +118 -0
- agno/eval/__init__.py +24 -0
- agno/eval/accuracy.py +666 -276
- agno/eval/agent_as_judge.py +861 -0
- agno/eval/base.py +29 -0
- agno/eval/performance.py +779 -0
- agno/eval/reliability.py +241 -62
- agno/eval/utils.py +120 -0
- agno/exceptions.py +143 -1
- agno/filters.py +354 -0
- agno/guardrails/__init__.py +6 -0
- agno/guardrails/base.py +19 -0
- agno/guardrails/openai.py +144 -0
- agno/guardrails/pii.py +94 -0
- agno/guardrails/prompt_injection.py +52 -0
- agno/hooks/__init__.py +3 -0
- agno/hooks/decorator.py +164 -0
- agno/integrations/discord/__init__.py +3 -0
- agno/integrations/discord/client.py +203 -0
- agno/knowledge/__init__.py +5 -1
- agno/{document → knowledge}/chunking/agentic.py +22 -14
- agno/{document → knowledge}/chunking/document.py +2 -2
- agno/{document → knowledge}/chunking/fixed.py +7 -6
- agno/knowledge/chunking/markdown.py +151 -0
- agno/{document → knowledge}/chunking/recursive.py +15 -3
- agno/knowledge/chunking/row.py +39 -0
- agno/knowledge/chunking/semantic.py +91 -0
- agno/knowledge/chunking/strategy.py +165 -0
- agno/knowledge/content.py +74 -0
- agno/knowledge/document/__init__.py +5 -0
- agno/{document → knowledge/document}/base.py +12 -2
- agno/knowledge/embedder/__init__.py +5 -0
- agno/knowledge/embedder/aws_bedrock.py +343 -0
- agno/knowledge/embedder/azure_openai.py +210 -0
- agno/{embedder → knowledge/embedder}/base.py +8 -0
- agno/knowledge/embedder/cohere.py +323 -0
- agno/knowledge/embedder/fastembed.py +62 -0
- agno/{embedder → knowledge/embedder}/fireworks.py +1 -1
- agno/knowledge/embedder/google.py +258 -0
- agno/knowledge/embedder/huggingface.py +94 -0
- agno/knowledge/embedder/jina.py +182 -0
- agno/knowledge/embedder/langdb.py +22 -0
- agno/knowledge/embedder/mistral.py +206 -0
- agno/knowledge/embedder/nebius.py +13 -0
- agno/knowledge/embedder/ollama.py +154 -0
- agno/knowledge/embedder/openai.py +195 -0
- agno/knowledge/embedder/sentence_transformer.py +63 -0
- agno/{embedder → knowledge/embedder}/together.py +1 -1
- agno/knowledge/embedder/vllm.py +262 -0
- agno/knowledge/embedder/voyageai.py +165 -0
- agno/knowledge/knowledge.py +3006 -0
- agno/knowledge/reader/__init__.py +7 -0
- agno/knowledge/reader/arxiv_reader.py +81 -0
- agno/knowledge/reader/base.py +95 -0
- agno/knowledge/reader/csv_reader.py +164 -0
- agno/knowledge/reader/docx_reader.py +82 -0
- agno/knowledge/reader/field_labeled_csv_reader.py +290 -0
- agno/knowledge/reader/firecrawl_reader.py +201 -0
- agno/knowledge/reader/json_reader.py +88 -0
- agno/knowledge/reader/markdown_reader.py +137 -0
- agno/knowledge/reader/pdf_reader.py +431 -0
- agno/knowledge/reader/pptx_reader.py +101 -0
- agno/knowledge/reader/reader_factory.py +313 -0
- agno/knowledge/reader/s3_reader.py +89 -0
- agno/knowledge/reader/tavily_reader.py +193 -0
- agno/knowledge/reader/text_reader.py +127 -0
- agno/knowledge/reader/web_search_reader.py +325 -0
- agno/knowledge/reader/website_reader.py +455 -0
- agno/knowledge/reader/wikipedia_reader.py +91 -0
- agno/knowledge/reader/youtube_reader.py +78 -0
- agno/knowledge/remote_content/remote_content.py +88 -0
- agno/knowledge/reranker/__init__.py +3 -0
- agno/{reranker → knowledge/reranker}/base.py +1 -1
- agno/{reranker → knowledge/reranker}/cohere.py +2 -2
- agno/knowledge/reranker/infinity.py +195 -0
- agno/knowledge/reranker/sentence_transformer.py +54 -0
- agno/knowledge/types.py +39 -0
- agno/knowledge/utils.py +234 -0
- agno/media.py +439 -95
- agno/memory/__init__.py +16 -3
- agno/memory/manager.py +1474 -123
- agno/memory/strategies/__init__.py +15 -0
- agno/memory/strategies/base.py +66 -0
- agno/memory/strategies/summarize.py +196 -0
- agno/memory/strategies/types.py +37 -0
- agno/models/aimlapi/__init__.py +5 -0
- agno/models/aimlapi/aimlapi.py +62 -0
- agno/models/anthropic/__init__.py +4 -0
- agno/models/anthropic/claude.py +960 -496
- agno/models/aws/__init__.py +15 -0
- agno/models/aws/bedrock.py +686 -451
- agno/models/aws/claude.py +190 -183
- agno/models/azure/__init__.py +18 -1
- agno/models/azure/ai_foundry.py +489 -0
- agno/models/azure/openai_chat.py +89 -40
- agno/models/base.py +2477 -550
- agno/models/cerebras/__init__.py +12 -0
- agno/models/cerebras/cerebras.py +565 -0
- agno/models/cerebras/cerebras_openai.py +131 -0
- agno/models/cohere/__init__.py +4 -0
- agno/models/cohere/chat.py +306 -492
- agno/models/cometapi/__init__.py +5 -0
- agno/models/cometapi/cometapi.py +74 -0
- agno/models/dashscope/__init__.py +5 -0
- agno/models/dashscope/dashscope.py +90 -0
- agno/models/deepinfra/__init__.py +5 -0
- agno/models/deepinfra/deepinfra.py +45 -0
- agno/models/deepseek/__init__.py +4 -0
- agno/models/deepseek/deepseek.py +110 -9
- agno/models/fireworks/__init__.py +4 -0
- agno/models/fireworks/fireworks.py +19 -22
- agno/models/google/__init__.py +3 -7
- agno/models/google/gemini.py +1717 -662
- agno/models/google/utils.py +22 -0
- agno/models/groq/__init__.py +4 -0
- agno/models/groq/groq.py +391 -666
- agno/models/huggingface/__init__.py +4 -0
- agno/models/huggingface/huggingface.py +266 -538
- agno/models/ibm/__init__.py +5 -0
- agno/models/ibm/watsonx.py +432 -0
- agno/models/internlm/__init__.py +3 -0
- agno/models/internlm/internlm.py +20 -3
- agno/models/langdb/__init__.py +1 -0
- agno/models/langdb/langdb.py +60 -0
- agno/models/litellm/__init__.py +14 -0
- agno/models/litellm/chat.py +503 -0
- agno/models/litellm/litellm_openai.py +42 -0
- agno/models/llama_cpp/__init__.py +5 -0
- agno/models/llama_cpp/llama_cpp.py +22 -0
- agno/models/lmstudio/__init__.py +5 -0
- agno/models/lmstudio/lmstudio.py +25 -0
- agno/models/message.py +361 -39
- agno/models/meta/__init__.py +12 -0
- agno/models/meta/llama.py +502 -0
- agno/models/meta/llama_openai.py +79 -0
- agno/models/metrics.py +120 -0
- agno/models/mistral/__init__.py +4 -0
- agno/models/mistral/mistral.py +293 -393
- agno/models/nebius/__init__.py +3 -0
- agno/models/nebius/nebius.py +53 -0
- agno/models/nexus/__init__.py +3 -0
- agno/models/nexus/nexus.py +22 -0
- agno/models/nvidia/__init__.py +4 -0
- agno/models/nvidia/nvidia.py +22 -3
- agno/models/ollama/__init__.py +4 -2
- agno/models/ollama/chat.py +257 -492
- agno/models/openai/__init__.py +7 -0
- agno/models/openai/chat.py +725 -770
- agno/models/openai/like.py +16 -2
- agno/models/openai/responses.py +1121 -0
- agno/models/openrouter/__init__.py +4 -0
- agno/models/openrouter/openrouter.py +62 -5
- agno/models/perplexity/__init__.py +5 -0
- agno/models/perplexity/perplexity.py +203 -0
- agno/models/portkey/__init__.py +3 -0
- agno/models/portkey/portkey.py +82 -0
- agno/models/requesty/__init__.py +5 -0
- agno/models/requesty/requesty.py +69 -0
- agno/models/response.py +177 -7
- agno/models/sambanova/__init__.py +4 -0
- agno/models/sambanova/sambanova.py +23 -4
- agno/models/siliconflow/__init__.py +5 -0
- agno/models/siliconflow/siliconflow.py +42 -0
- agno/models/together/__init__.py +4 -0
- agno/models/together/together.py +21 -164
- agno/models/utils.py +266 -0
- agno/models/vercel/__init__.py +3 -0
- agno/models/vercel/v0.py +43 -0
- agno/models/vertexai/__init__.py +0 -1
- agno/models/vertexai/claude.py +190 -0
- agno/models/vllm/__init__.py +3 -0
- agno/models/vllm/vllm.py +83 -0
- agno/models/xai/__init__.py +2 -0
- agno/models/xai/xai.py +111 -7
- agno/os/__init__.py +3 -0
- agno/os/app.py +1027 -0
- agno/os/auth.py +244 -0
- agno/os/config.py +126 -0
- agno/os/interfaces/__init__.py +1 -0
- agno/os/interfaces/a2a/__init__.py +3 -0
- agno/os/interfaces/a2a/a2a.py +42 -0
- agno/os/interfaces/a2a/router.py +249 -0
- agno/os/interfaces/a2a/utils.py +924 -0
- agno/os/interfaces/agui/__init__.py +3 -0
- agno/os/interfaces/agui/agui.py +47 -0
- agno/os/interfaces/agui/router.py +147 -0
- agno/os/interfaces/agui/utils.py +574 -0
- agno/os/interfaces/base.py +25 -0
- agno/os/interfaces/slack/__init__.py +3 -0
- agno/os/interfaces/slack/router.py +148 -0
- agno/os/interfaces/slack/security.py +30 -0
- agno/os/interfaces/slack/slack.py +47 -0
- agno/os/interfaces/whatsapp/__init__.py +3 -0
- agno/os/interfaces/whatsapp/router.py +210 -0
- agno/os/interfaces/whatsapp/security.py +55 -0
- agno/os/interfaces/whatsapp/whatsapp.py +36 -0
- agno/os/mcp.py +293 -0
- agno/os/middleware/__init__.py +9 -0
- agno/os/middleware/jwt.py +797 -0
- agno/os/router.py +258 -0
- agno/os/routers/__init__.py +3 -0
- agno/os/routers/agents/__init__.py +3 -0
- agno/os/routers/agents/router.py +599 -0
- agno/os/routers/agents/schema.py +261 -0
- agno/os/routers/evals/__init__.py +3 -0
- agno/os/routers/evals/evals.py +450 -0
- agno/os/routers/evals/schemas.py +174 -0
- agno/os/routers/evals/utils.py +231 -0
- agno/os/routers/health.py +31 -0
- agno/os/routers/home.py +52 -0
- agno/os/routers/knowledge/__init__.py +3 -0
- agno/os/routers/knowledge/knowledge.py +1008 -0
- agno/os/routers/knowledge/schemas.py +178 -0
- agno/os/routers/memory/__init__.py +3 -0
- agno/os/routers/memory/memory.py +661 -0
- agno/os/routers/memory/schemas.py +88 -0
- agno/os/routers/metrics/__init__.py +3 -0
- agno/os/routers/metrics/metrics.py +190 -0
- agno/os/routers/metrics/schemas.py +47 -0
- agno/os/routers/session/__init__.py +3 -0
- agno/os/routers/session/session.py +997 -0
- agno/os/routers/teams/__init__.py +3 -0
- agno/os/routers/teams/router.py +512 -0
- agno/os/routers/teams/schema.py +257 -0
- agno/os/routers/traces/__init__.py +3 -0
- agno/os/routers/traces/schemas.py +414 -0
- agno/os/routers/traces/traces.py +499 -0
- agno/os/routers/workflows/__init__.py +3 -0
- agno/os/routers/workflows/router.py +624 -0
- agno/os/routers/workflows/schema.py +75 -0
- agno/os/schema.py +534 -0
- agno/os/scopes.py +469 -0
- agno/{playground → os}/settings.py +7 -15
- agno/os/utils.py +973 -0
- agno/reasoning/anthropic.py +80 -0
- agno/reasoning/azure_ai_foundry.py +67 -0
- agno/reasoning/deepseek.py +63 -0
- agno/reasoning/default.py +97 -0
- agno/reasoning/gemini.py +73 -0
- agno/reasoning/groq.py +71 -0
- agno/reasoning/helpers.py +24 -1
- agno/reasoning/ollama.py +67 -0
- agno/reasoning/openai.py +86 -0
- agno/reasoning/step.py +2 -1
- agno/reasoning/vertexai.py +76 -0
- agno/run/__init__.py +6 -0
- agno/run/agent.py +822 -0
- agno/run/base.py +247 -0
- agno/run/cancel.py +81 -0
- agno/run/requirement.py +181 -0
- agno/run/team.py +767 -0
- agno/run/workflow.py +708 -0
- agno/session/__init__.py +10 -0
- agno/session/agent.py +260 -0
- agno/session/summary.py +265 -0
- agno/session/team.py +342 -0
- agno/session/workflow.py +501 -0
- agno/table.py +10 -0
- agno/team/__init__.py +37 -0
- agno/team/team.py +9536 -0
- agno/tools/__init__.py +7 -0
- agno/tools/agentql.py +120 -0
- agno/tools/airflow.py +22 -12
- agno/tools/api.py +122 -0
- agno/tools/apify.py +276 -83
- agno/tools/{arxiv_toolkit.py → arxiv.py} +20 -12
- agno/tools/aws_lambda.py +28 -7
- agno/tools/aws_ses.py +66 -0
- agno/tools/baidusearch.py +11 -4
- agno/tools/bitbucket.py +292 -0
- agno/tools/brandfetch.py +213 -0
- agno/tools/bravesearch.py +106 -0
- agno/tools/brightdata.py +367 -0
- agno/tools/browserbase.py +209 -0
- agno/tools/calcom.py +32 -23
- agno/tools/calculator.py +24 -37
- agno/tools/cartesia.py +187 -0
- agno/tools/{clickup_tool.py → clickup.py} +17 -28
- agno/tools/confluence.py +91 -26
- agno/tools/crawl4ai.py +139 -43
- agno/tools/csv_toolkit.py +28 -22
- agno/tools/dalle.py +36 -22
- agno/tools/daytona.py +475 -0
- agno/tools/decorator.py +169 -14
- agno/tools/desi_vocal.py +23 -11
- agno/tools/discord.py +32 -29
- agno/tools/docker.py +716 -0
- agno/tools/duckdb.py +76 -81
- agno/tools/duckduckgo.py +43 -40
- agno/tools/e2b.py +703 -0
- agno/tools/eleven_labs.py +65 -54
- agno/tools/email.py +13 -5
- agno/tools/evm.py +129 -0
- agno/tools/exa.py +324 -42
- agno/tools/fal.py +39 -35
- agno/tools/file.py +196 -30
- agno/tools/file_generation.py +356 -0
- agno/tools/financial_datasets.py +288 -0
- agno/tools/firecrawl.py +108 -33
- agno/tools/function.py +960 -122
- agno/tools/giphy.py +34 -12
- agno/tools/github.py +1294 -97
- agno/tools/gmail.py +922 -0
- agno/tools/google_bigquery.py +117 -0
- agno/tools/google_drive.py +271 -0
- agno/tools/google_maps.py +253 -0
- agno/tools/googlecalendar.py +607 -107
- agno/tools/googlesheets.py +377 -0
- agno/tools/hackernews.py +20 -12
- agno/tools/jina.py +24 -14
- agno/tools/jira.py +48 -19
- agno/tools/knowledge.py +218 -0
- agno/tools/linear.py +82 -43
- agno/tools/linkup.py +58 -0
- agno/tools/local_file_system.py +15 -7
- agno/tools/lumalab.py +41 -26
- agno/tools/mcp/__init__.py +10 -0
- agno/tools/mcp/mcp.py +331 -0
- agno/tools/mcp/multi_mcp.py +347 -0
- agno/tools/mcp/params.py +24 -0
- agno/tools/mcp_toolbox.py +284 -0
- agno/tools/mem0.py +193 -0
- agno/tools/memory.py +419 -0
- agno/tools/mlx_transcribe.py +11 -9
- agno/tools/models/azure_openai.py +190 -0
- agno/tools/models/gemini.py +203 -0
- agno/tools/models/groq.py +158 -0
- agno/tools/models/morph.py +186 -0
- agno/tools/models/nebius.py +124 -0
- agno/tools/models_labs.py +163 -82
- agno/tools/moviepy_video.py +18 -13
- agno/tools/nano_banana.py +151 -0
- agno/tools/neo4j.py +134 -0
- agno/tools/newspaper.py +15 -4
- agno/tools/newspaper4k.py +19 -6
- agno/tools/notion.py +204 -0
- agno/tools/openai.py +181 -17
- agno/tools/openbb.py +27 -20
- agno/tools/opencv.py +321 -0
- agno/tools/openweather.py +233 -0
- agno/tools/oxylabs.py +385 -0
- agno/tools/pandas.py +25 -15
- agno/tools/parallel.py +314 -0
- agno/tools/postgres.py +238 -185
- agno/tools/pubmed.py +125 -13
- agno/tools/python.py +48 -35
- agno/tools/reasoning.py +283 -0
- agno/tools/reddit.py +207 -29
- agno/tools/redshift.py +406 -0
- agno/tools/replicate.py +69 -26
- agno/tools/resend.py +11 -6
- agno/tools/scrapegraph.py +179 -19
- agno/tools/searxng.py +23 -31
- agno/tools/serpapi.py +15 -10
- agno/tools/serper.py +255 -0
- agno/tools/shell.py +23 -12
- agno/tools/shopify.py +1519 -0
- agno/tools/slack.py +56 -14
- agno/tools/sleep.py +8 -6
- agno/tools/spider.py +35 -11
- agno/tools/spotify.py +919 -0
- agno/tools/sql.py +34 -19
- agno/tools/tavily.py +158 -8
- agno/tools/telegram.py +18 -8
- agno/tools/todoist.py +218 -0
- agno/tools/toolkit.py +134 -9
- agno/tools/trafilatura.py +388 -0
- agno/tools/trello.py +25 -28
- agno/tools/twilio.py +18 -9
- agno/tools/user_control_flow.py +78 -0
- agno/tools/valyu.py +228 -0
- agno/tools/visualization.py +467 -0
- agno/tools/webbrowser.py +28 -0
- agno/tools/webex.py +76 -0
- agno/tools/website.py +23 -19
- agno/tools/webtools.py +45 -0
- agno/tools/whatsapp.py +286 -0
- agno/tools/wikipedia.py +28 -19
- agno/tools/workflow.py +285 -0
- agno/tools/{twitter.py → x.py} +142 -46
- agno/tools/yfinance.py +41 -39
- agno/tools/youtube.py +34 -17
- agno/tools/zendesk.py +15 -5
- agno/tools/zep.py +454 -0
- agno/tools/zoom.py +86 -37
- agno/tracing/__init__.py +12 -0
- agno/tracing/exporter.py +157 -0
- agno/tracing/schemas.py +276 -0
- agno/tracing/setup.py +111 -0
- agno/utils/agent.py +938 -0
- agno/utils/audio.py +37 -1
- agno/utils/certs.py +27 -0
- agno/utils/code_execution.py +11 -0
- agno/utils/common.py +103 -20
- agno/utils/cryptography.py +22 -0
- agno/utils/dttm.py +33 -0
- agno/utils/events.py +700 -0
- agno/utils/functions.py +107 -37
- agno/utils/gemini.py +426 -0
- agno/utils/hooks.py +171 -0
- agno/utils/http.py +185 -0
- agno/utils/json_schema.py +159 -37
- agno/utils/knowledge.py +36 -0
- agno/utils/location.py +19 -0
- agno/utils/log.py +221 -8
- agno/utils/mcp.py +214 -0
- agno/utils/media.py +335 -14
- agno/utils/merge_dict.py +22 -1
- agno/utils/message.py +77 -2
- agno/utils/models/ai_foundry.py +50 -0
- agno/utils/models/claude.py +373 -0
- agno/utils/models/cohere.py +94 -0
- agno/utils/models/llama.py +85 -0
- agno/utils/models/mistral.py +100 -0
- agno/utils/models/openai_responses.py +140 -0
- agno/utils/models/schema_utils.py +153 -0
- agno/utils/models/watsonx.py +41 -0
- agno/utils/openai.py +257 -0
- agno/utils/pickle.py +1 -1
- agno/utils/pprint.py +124 -8
- agno/utils/print_response/agent.py +930 -0
- agno/utils/print_response/team.py +1914 -0
- agno/utils/print_response/workflow.py +1668 -0
- agno/utils/prompts.py +111 -0
- agno/utils/reasoning.py +108 -0
- agno/utils/response.py +163 -0
- agno/utils/serialize.py +32 -0
- agno/utils/shell.py +4 -4
- agno/utils/streamlit.py +487 -0
- agno/utils/string.py +204 -51
- agno/utils/team.py +139 -0
- agno/utils/timer.py +9 -2
- agno/utils/tokens.py +657 -0
- agno/utils/tools.py +19 -1
- agno/utils/whatsapp.py +305 -0
- agno/utils/yaml_io.py +3 -3
- agno/vectordb/__init__.py +2 -0
- agno/vectordb/base.py +87 -9
- agno/vectordb/cassandra/__init__.py +5 -1
- agno/vectordb/cassandra/cassandra.py +383 -27
- agno/vectordb/chroma/__init__.py +4 -0
- agno/vectordb/chroma/chromadb.py +748 -83
- agno/vectordb/clickhouse/__init__.py +7 -1
- agno/vectordb/clickhouse/clickhousedb.py +554 -53
- agno/vectordb/couchbase/__init__.py +3 -0
- agno/vectordb/couchbase/couchbase.py +1446 -0
- agno/vectordb/lancedb/__init__.py +5 -0
- agno/vectordb/lancedb/lance_db.py +730 -98
- agno/vectordb/langchaindb/__init__.py +5 -0
- agno/vectordb/langchaindb/langchaindb.py +163 -0
- agno/vectordb/lightrag/__init__.py +5 -0
- agno/vectordb/lightrag/lightrag.py +388 -0
- agno/vectordb/llamaindex/__init__.py +3 -0
- agno/vectordb/llamaindex/llamaindexdb.py +166 -0
- agno/vectordb/milvus/__init__.py +3 -0
- agno/vectordb/milvus/milvus.py +966 -78
- agno/vectordb/mongodb/__init__.py +9 -1
- agno/vectordb/mongodb/mongodb.py +1175 -172
- agno/vectordb/pgvector/__init__.py +8 -0
- agno/vectordb/pgvector/pgvector.py +599 -115
- agno/vectordb/pineconedb/__init__.py +5 -1
- agno/vectordb/pineconedb/pineconedb.py +406 -43
- agno/vectordb/qdrant/__init__.py +4 -0
- agno/vectordb/qdrant/qdrant.py +914 -61
- agno/vectordb/redis/__init__.py +9 -0
- agno/vectordb/redis/redisdb.py +682 -0
- agno/vectordb/singlestore/__init__.py +8 -1
- agno/vectordb/singlestore/singlestore.py +771 -0
- agno/vectordb/surrealdb/__init__.py +3 -0
- agno/vectordb/surrealdb/surrealdb.py +663 -0
- agno/vectordb/upstashdb/__init__.py +5 -0
- agno/vectordb/upstashdb/upstashdb.py +718 -0
- agno/vectordb/weaviate/__init__.py +8 -0
- agno/vectordb/weaviate/index.py +15 -0
- agno/vectordb/weaviate/weaviate.py +1009 -0
- agno/workflow/__init__.py +23 -1
- agno/workflow/agent.py +299 -0
- agno/workflow/condition.py +759 -0
- agno/workflow/loop.py +756 -0
- agno/workflow/parallel.py +853 -0
- agno/workflow/router.py +723 -0
- agno/workflow/step.py +1564 -0
- agno/workflow/steps.py +613 -0
- agno/workflow/types.py +556 -0
- agno/workflow/workflow.py +4327 -514
- agno-2.3.13.dist-info/METADATA +639 -0
- agno-2.3.13.dist-info/RECORD +613 -0
- {agno-0.1.2.dist-info → agno-2.3.13.dist-info}/WHEEL +1 -1
- agno-2.3.13.dist-info/licenses/LICENSE +201 -0
- agno/api/playground.py +0 -91
- agno/api/schemas/playground.py +0 -22
- agno/api/schemas/user.py +0 -22
- agno/api/schemas/workspace.py +0 -46
- agno/api/user.py +0 -160
- agno/api/workspace.py +0 -151
- agno/cli/auth_server.py +0 -118
- agno/cli/config.py +0 -275
- agno/cli/console.py +0 -88
- agno/cli/credentials.py +0 -23
- agno/cli/entrypoint.py +0 -571
- agno/cli/operator.py +0 -355
- agno/cli/settings.py +0 -85
- agno/cli/ws/ws_cli.py +0 -817
- agno/constants.py +0 -13
- agno/document/__init__.py +0 -1
- agno/document/chunking/semantic.py +0 -47
- agno/document/chunking/strategy.py +0 -31
- agno/document/reader/__init__.py +0 -1
- agno/document/reader/arxiv_reader.py +0 -41
- agno/document/reader/base.py +0 -22
- agno/document/reader/csv_reader.py +0 -84
- agno/document/reader/docx_reader.py +0 -46
- agno/document/reader/firecrawl_reader.py +0 -99
- agno/document/reader/json_reader.py +0 -43
- agno/document/reader/pdf_reader.py +0 -219
- agno/document/reader/s3/pdf_reader.py +0 -46
- agno/document/reader/s3/text_reader.py +0 -51
- agno/document/reader/text_reader.py +0 -41
- agno/document/reader/website_reader.py +0 -175
- agno/document/reader/youtube_reader.py +0 -50
- agno/embedder/__init__.py +0 -1
- agno/embedder/azure_openai.py +0 -86
- agno/embedder/cohere.py +0 -72
- agno/embedder/fastembed.py +0 -37
- agno/embedder/google.py +0 -73
- agno/embedder/huggingface.py +0 -54
- agno/embedder/mistral.py +0 -80
- agno/embedder/ollama.py +0 -57
- agno/embedder/openai.py +0 -74
- agno/embedder/sentence_transformer.py +0 -38
- agno/embedder/voyageai.py +0 -64
- agno/eval/perf.py +0 -201
- agno/file/__init__.py +0 -1
- agno/file/file.py +0 -16
- agno/file/local/csv.py +0 -32
- agno/file/local/txt.py +0 -19
- agno/infra/app.py +0 -240
- agno/infra/base.py +0 -144
- agno/infra/context.py +0 -20
- agno/infra/db_app.py +0 -52
- agno/infra/resource.py +0 -205
- agno/infra/resources.py +0 -55
- agno/knowledge/agent.py +0 -230
- agno/knowledge/arxiv.py +0 -22
- agno/knowledge/combined.py +0 -22
- agno/knowledge/csv.py +0 -28
- agno/knowledge/csv_url.py +0 -19
- agno/knowledge/document.py +0 -20
- agno/knowledge/docx.py +0 -30
- agno/knowledge/json.py +0 -28
- agno/knowledge/langchain.py +0 -71
- agno/knowledge/llamaindex.py +0 -66
- agno/knowledge/pdf.py +0 -28
- agno/knowledge/pdf_url.py +0 -26
- agno/knowledge/s3/base.py +0 -60
- agno/knowledge/s3/pdf.py +0 -21
- agno/knowledge/s3/text.py +0 -23
- agno/knowledge/text.py +0 -30
- agno/knowledge/website.py +0 -88
- agno/knowledge/wikipedia.py +0 -31
- agno/knowledge/youtube.py +0 -22
- agno/memory/agent.py +0 -392
- agno/memory/classifier.py +0 -104
- agno/memory/db/__init__.py +0 -1
- agno/memory/db/base.py +0 -42
- agno/memory/db/mongodb.py +0 -189
- agno/memory/db/postgres.py +0 -203
- agno/memory/db/sqlite.py +0 -193
- agno/memory/memory.py +0 -15
- agno/memory/row.py +0 -36
- agno/memory/summarizer.py +0 -192
- agno/memory/summary.py +0 -19
- agno/memory/workflow.py +0 -38
- agno/models/google/gemini_openai.py +0 -26
- agno/models/ollama/hermes.py +0 -221
- agno/models/ollama/tools.py +0 -362
- agno/models/vertexai/gemini.py +0 -595
- agno/playground/__init__.py +0 -3
- agno/playground/async_router.py +0 -421
- agno/playground/deploy.py +0 -249
- agno/playground/operator.py +0 -92
- agno/playground/playground.py +0 -91
- agno/playground/schemas.py +0 -76
- agno/playground/serve.py +0 -55
- agno/playground/sync_router.py +0 -405
- agno/reasoning/agent.py +0 -68
- agno/run/response.py +0 -112
- agno/storage/agent/__init__.py +0 -0
- agno/storage/agent/base.py +0 -38
- agno/storage/agent/dynamodb.py +0 -350
- agno/storage/agent/json.py +0 -92
- agno/storage/agent/mongodb.py +0 -228
- agno/storage/agent/postgres.py +0 -367
- agno/storage/agent/session.py +0 -79
- agno/storage/agent/singlestore.py +0 -303
- agno/storage/agent/sqlite.py +0 -357
- agno/storage/agent/yaml.py +0 -93
- agno/storage/workflow/__init__.py +0 -0
- agno/storage/workflow/base.py +0 -40
- agno/storage/workflow/mongodb.py +0 -233
- agno/storage/workflow/postgres.py +0 -366
- agno/storage/workflow/session.py +0 -60
- agno/storage/workflow/sqlite.py +0 -359
- agno/tools/googlesearch.py +0 -88
- agno/utils/defaults.py +0 -57
- agno/utils/filesystem.py +0 -39
- agno/utils/git.py +0 -52
- agno/utils/json_io.py +0 -30
- agno/utils/load_env.py +0 -19
- agno/utils/py_io.py +0 -19
- agno/utils/pyproject.py +0 -18
- agno/utils/resource_filter.py +0 -31
- agno/vectordb/singlestore/s2vectordb.py +0 -390
- agno/vectordb/singlestore/s2vectordb2.py +0 -355
- agno/workspace/__init__.py +0 -0
- agno/workspace/config.py +0 -325
- agno/workspace/enums.py +0 -6
- agno/workspace/helpers.py +0 -48
- agno/workspace/operator.py +0 -758
- agno/workspace/settings.py +0 -63
- agno-0.1.2.dist-info/LICENSE +0 -375
- agno-0.1.2.dist-info/METADATA +0 -502
- agno-0.1.2.dist-info/RECORD +0 -352
- agno-0.1.2.dist-info/entry_points.txt +0 -3
- /agno/{cli → db/migrations}/__init__.py +0 -0
- /agno/{cli/ws → db/migrations/versions}/__init__.py +0 -0
- /agno/{document/chunking/__init__.py → db/schemas/metrics.py} +0 -0
- /agno/{document/reader/s3 → integrations}/__init__.py +0 -0
- /agno/{file/local → knowledge/chunking}/__init__.py +0 -0
- /agno/{infra → knowledge/remote_content}/__init__.py +0 -0
- /agno/{knowledge/s3 → tools/models}/__init__.py +0 -0
- /agno/{reranker → utils/models}/__init__.py +0 -0
- /agno/{storage → utils/print_response}/__init__.py +0 -0
- {agno-0.1.2.dist-info → agno-2.3.13.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,1312 @@
|
|
|
1
|
+
import time
|
|
2
|
+
from copy import deepcopy
|
|
3
|
+
from datetime import date, datetime, timedelta, timezone
|
|
4
|
+
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
|
|
5
|
+
from uuid import uuid4
|
|
6
|
+
|
|
7
|
+
from agno.db.base import BaseDb, SessionType
|
|
8
|
+
from agno.db.in_memory.utils import (
|
|
9
|
+
apply_sorting,
|
|
10
|
+
calculate_date_metrics,
|
|
11
|
+
deserialize_cultural_knowledge_from_db,
|
|
12
|
+
fetch_all_sessions_data,
|
|
13
|
+
get_dates_to_calculate_metrics_for,
|
|
14
|
+
serialize_cultural_knowledge_for_db,
|
|
15
|
+
)
|
|
16
|
+
from agno.db.schemas.culture import CulturalKnowledge
|
|
17
|
+
from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
|
|
18
|
+
from agno.db.schemas.knowledge import KnowledgeRow
|
|
19
|
+
from agno.db.schemas.memory import UserMemory
|
|
20
|
+
from agno.session import AgentSession, Session, TeamSession, WorkflowSession
|
|
21
|
+
from agno.utils.log import log_debug, log_error, log_info, log_warning
|
|
22
|
+
|
|
23
|
+
if TYPE_CHECKING:
|
|
24
|
+
from agno.tracing.schemas import Span, Trace
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
class InMemoryDb(BaseDb):
|
|
28
|
+
def __init__(self):
    """In-memory storage backend: every record category is a plain Python list of dicts."""
    super().__init__()

    # One list-of-dicts bucket per record category. Attribute names are part of
    # the internal contract — sibling methods read/write them directly.
    _buckets = (
        "_sessions",
        "_memories",
        "_metrics",
        "_eval_runs",
        "_knowledge",
        "_cultural_knowledge",
    )
    for bucket in _buckets:
        setattr(self, bucket, [])  # type: List[Dict[str, Any]]
|
|
39
|
+
|
|
40
|
+
def table_exists(self, table_name: str) -> bool:
    """Report whether the given table exists.

    The in-memory backend has no real tables, so every name is
    considered present.
    """
    return True
|
|
43
|
+
|
|
44
|
+
def get_latest_schema_version(self):
    """Return the latest schema version.

    No-op for the in-memory backend: there is no persisted schema,
    so nothing meaningful can be returned.
    """
    return None
|
|
47
|
+
|
|
48
|
+
def upsert_schema_version(self, version: str) -> None:
    """Record the given schema version.

    No-op for the in-memory backend: nothing is persisted, so there is
    no schema-version row to upsert.
    """
    return None
|
|
51
|
+
|
|
52
|
+
# -- Session methods --
|
|
53
|
+
def delete_session(self, session_id: str) -> bool:
    """Delete a session from in-memory storage.

    Args:
        session_id (str): The ID of the session to delete.

    Returns:
        bool: True if a session was removed, False if none matched.

    Raises:
        Exception: If an error occurs during deletion.
    """
    try:
        remaining = [record for record in self._sessions if record.get("session_id") != session_id]
        removed = len(remaining) < len(self._sessions)
        self._sessions = remaining

        if removed:
            log_debug(f"Successfully deleted session with session_id: {session_id}")
        else:
            log_debug(f"No session found to delete with session_id: {session_id}")
        return removed

    except Exception as e:
        log_error(f"Error deleting session: {e}")
        raise e
|
|
79
|
+
|
|
80
|
+
def delete_sessions(self, session_ids: List[str]) -> None:
    """Delete multiple sessions from in-memory storage.

    Args:
        session_ids (List[str]): The IDs of the sessions to delete.

    Raises:
        Exception: If an error occurs during deletion.
    """
    try:
        # Keep only the records whose id is NOT in the deletion list.
        self._sessions = list(
            filter(lambda record: record.get("session_id") not in session_ids, self._sessions)
        )
        log_debug(f"Successfully deleted sessions with ids: {session_ids}")

    except Exception as e:
        log_error(f"Error deleting sessions: {e}")
        raise e
|
|
96
|
+
|
|
97
|
+
def get_session(
    self,
    session_id: str,
    session_type: SessionType,
    user_id: Optional[str] = None,
    deserialize: Optional[bool] = True,
) -> Optional[Union[AgentSession, TeamSession, WorkflowSession, Dict[str, Any]]]:
    """Read a session from in-memory storage.

    Args:
        session_id (str): The ID of the session to read.
        session_type (SessionType): The type of the session to read.
        user_id (Optional[str]): The ID of the user to read the session for.
        deserialize (Optional[bool]): Whether to deserialize the session.

    Returns:
        Union[Session, Dict[str, Any], None]:
            - When deserialize=True: Session object
            - When deserialize=False: Session dictionary

    Raises:
        Exception: If an error occurs while reading the session.
    """
    try:
        for session_data in self._sessions:
            if session_data.get("session_id") == session_id:
                # Skip records that match the id but belong to a different user
                if user_id is not None and session_data.get("user_id") != user_id:
                    continue

                # Return a copy so callers cannot mutate the stored record
                session_data_copy = deepcopy(session_data)

                if not deserialize:
                    return session_data_copy

                if session_type == SessionType.AGENT:
                    return AgentSession.from_dict(session_data_copy)
                elif session_type == SessionType.TEAM:
                    return TeamSession.from_dict(session_data_copy)
                else:
                    return WorkflowSession.from_dict(session_data_copy)

        return None

    except Exception as e:
        # Fix: removed a leftover `traceback.print_exc()` that wrote directly
        # to stderr, bypassing the logging layer used by every sibling method.
        log_error(f"Exception reading session: {e}")
        raise e
|
|
146
|
+
|
|
147
|
+
def get_sessions(
    self,
    session_type: SessionType,
    user_id: Optional[str] = None,
    component_id: Optional[str] = None,
    session_name: Optional[str] = None,
    start_timestamp: Optional[int] = None,
    end_timestamp: Optional[int] = None,
    limit: Optional[int] = None,
    page: Optional[int] = None,
    sort_by: Optional[str] = None,
    sort_order: Optional[str] = None,
    deserialize: Optional[bool] = True,
) -> Union[List[Session], Tuple[List[Dict[str, Any]], int]]:
    """Get all sessions from in-memory storage with filtering and pagination.

    Args:
        session_type (SessionType): The type of the sessions to read.
        user_id (Optional[str]): The ID of the user to read the sessions for.
        component_id (Optional[str]): The ID of the component to read the sessions for.
        session_name (Optional[str]): The name of the session to read.
        start_timestamp (Optional[int]): The start timestamp of the sessions to read.
        end_timestamp (Optional[int]): The end timestamp of the sessions to read.
        limit (Optional[int]): The limit of the sessions to read.
        page (Optional[int]): The page of the sessions to read.
        sort_by (Optional[str]): The field to sort the sessions by.
        sort_order (Optional[str]): The order to sort the sessions by.
        deserialize (Optional[bool]): Whether to deserialize the sessions.

    Returns:
        Union[List[AgentSession], List[TeamSession], List[WorkflowSession], Tuple[List[Dict[str, Any]], int]]:
            - When deserialize=True: List of sessions
            - When deserialize=False: Tuple with list of sessions and total count
              (the count reflects the filtered total BEFORE pagination)

    Raises:
        Exception: If an error occurs while reading the sessions.
    """
    try:
        # Apply filters
        filtered_sessions = []
        for session_data in self._sessions:
            if user_id is not None and session_data.get("user_id") != user_id:
                continue
            if component_id is not None:
                # component_id maps to a different record field per session type
                if session_type == SessionType.AGENT and session_data.get("agent_id") != component_id:
                    continue
                elif session_type == SessionType.TEAM and session_data.get("team_id") != component_id:
                    continue
                elif session_type == SessionType.WORKFLOW and session_data.get("workflow_id") != component_id:
                    continue
            # Timestamp window is inclusive on both ends, keyed on created_at
            if start_timestamp is not None and session_data.get("created_at", 0) < start_timestamp:
                continue
            if end_timestamp is not None and session_data.get("created_at", 0) > end_timestamp:
                continue
            if session_name is not None:
                # Case-insensitive substring match on the stored session name
                stored_name = session_data.get("session_data", {}).get("session_name", "")
                if session_name.lower() not in stored_name.lower():
                    continue
            # Accept either the enum or its raw string value as session_type
            session_type_value = session_type.value if isinstance(session_type, SessionType) else session_type
            if session_data.get("session_type") != session_type_value:
                continue

            filtered_sessions.append(deepcopy(session_data))

        total_count = len(filtered_sessions)

        # Apply sorting
        filtered_sessions = apply_sorting(filtered_sessions, sort_by, sort_order)

        # Apply pagination
        if limit is not None:
            start_idx = 0
            if page is not None:
                start_idx = (page - 1) * limit
            filtered_sessions = filtered_sessions[start_idx : start_idx + limit]

        if not deserialize:
            return filtered_sessions, total_count

        if session_type == SessionType.AGENT:
            return [AgentSession.from_dict(session) for session in filtered_sessions]  # type: ignore
        elif session_type == SessionType.TEAM:
            return [TeamSession.from_dict(session) for session in filtered_sessions]  # type: ignore
        elif session_type == SessionType.WORKFLOW:
            return [WorkflowSession.from_dict(session) for session in filtered_sessions]  # type: ignore
        else:
            raise ValueError(f"Invalid session type: {session_type}")

    except Exception as e:
        log_error(f"Exception reading sessions: {e}")
        raise e
|
|
238
|
+
|
|
239
|
+
def rename_session(
    self, session_id: str, session_type: SessionType, session_name: str, deserialize: Optional[bool] = True
) -> Optional[Union[Session, Dict[str, Any]]]:
    """Rename a session in in-memory storage.

    Args:
        session_id (str): The ID of the session to rename.
        session_type (SessionType): The type of the session to rename.
        session_name (str): The new name for the session.
        deserialize (Optional[bool]): Whether to deserialize the returned session.

    Returns:
        Optional[Union[Session, Dict[str, Any]]]: The renamed session, or None
        if no session with the given id and type exists.

    Raises:
        Exception: If an error occurs while renaming the session.
    """
    try:
        for i, session in enumerate(self._sessions):
            if session.get("session_id") == session_id and session.get("session_type") == session_type.value:
                # Update session name in session_data
                if "session_data" not in session:
                    session["session_data"] = {}
                session["session_data"]["session_name"] = session_name

                self._sessions[i] = session

                log_debug(f"Renamed session with id '{session_id}' to '{session_name}'")

                # Copy before returning so callers cannot mutate the stored record
                session_copy = deepcopy(session)
                if not deserialize:
                    return session_copy

                if session_type == SessionType.AGENT:
                    return AgentSession.from_dict(session_copy)
                elif session_type == SessionType.TEAM:
                    return TeamSession.from_dict(session_copy)
                else:
                    return WorkflowSession.from_dict(session_copy)

        return None

    except Exception as e:
        log_error(f"Exception renaming session: {e}")
        raise e
|
|
270
|
+
|
|
271
|
+
def upsert_session(
    self, session: Session, deserialize: Optional[bool] = True
) -> Optional[Union[Session, Dict[str, Any]]]:
    """Insert or update a session in in-memory storage.

    Matching is done on session_id plus the component key (agent_id / team_id /
    workflow_id) for the session's concrete type.

    Args:
        session (Session): The session to upsert.
        deserialize (Optional[bool]): Whether to return a Session object (True)
            or the raw session dictionary (False).

    Returns:
        Optional[Union[Session, Dict[str, Any]]]: The upserted session.

    Raises:
        Exception: If an error occurs during the upsert.
    """
    try:
        session_dict = session.to_dict()

        # Tag the record with its session type so it can be filtered later
        if isinstance(session, AgentSession):
            session_dict["session_type"] = SessionType.AGENT.value
        elif isinstance(session, TeamSession):
            session_dict["session_type"] = SessionType.TEAM.value
        elif isinstance(session, WorkflowSession):
            session_dict["session_type"] = SessionType.WORKFLOW.value

        # Find existing session to update
        session_updated = False
        for i, existing_session in enumerate(self._sessions):
            if existing_session.get("session_id") == session_dict.get("session_id") and self._matches_session_key(
                existing_session, session
            ):
                session_dict["updated_at"] = int(time.time())
                self._sessions[i] = deepcopy(session_dict)
                session_updated = True
                break

        if not session_updated:
            # New record: initialize timestamps (updated_at mirrors created_at)
            session_dict["created_at"] = session_dict.get("created_at", int(time.time()))
            session_dict["updated_at"] = session_dict.get("created_at")
            self._sessions.append(deepcopy(session_dict))

        session_dict_copy = deepcopy(session_dict)
        if not deserialize:
            return session_dict_copy

        # Fix: compare against the enum *value* — the stored field is the plain
        # string set above, so comparing against the enum member itself only
        # worked if SessionType happened to be a str-backed enum.
        if session_dict_copy["session_type"] == SessionType.AGENT.value:
            return AgentSession.from_dict(session_dict_copy)
        elif session_dict_copy["session_type"] == SessionType.TEAM.value:
            return TeamSession.from_dict(session_dict_copy)
        else:
            return WorkflowSession.from_dict(session_dict_copy)

    except Exception as e:
        log_error(f"Exception upserting session: {e}")
        raise e
|
|
315
|
+
|
|
316
|
+
def _matches_session_key(self, existing_session: Dict[str, Any], session: Session) -> bool:
    """Return True when the stored record's component id matches the session's owner.

    Each concrete session type is keyed on a different field; unknown types
    never match.
    """
    if isinstance(session, AgentSession):
        field, expected = "agent_id", session.agent_id
    elif isinstance(session, TeamSession):
        field, expected = "team_id", session.team_id
    elif isinstance(session, WorkflowSession):
        field, expected = "workflow_id", session.workflow_id
    else:
        return False
    return existing_session.get(field) == expected
|
|
325
|
+
|
|
326
|
+
def upsert_sessions(
    self, sessions: List[Session], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
) -> List[Union[Session, Dict[str, Any]]]:
    """
    Bulk upsert multiple sessions for improved performance on large datasets.

    Delegates to upsert_session per entry; for the in-memory backend
    individual upserts are already cheap list/dict operations.

    Args:
        sessions (List[Session]): List of sessions to upsert. None entries are skipped.
        deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.
        preserve_updated_at (bool): Accepted for interface compatibility.
            NOTE(review): currently unused by this backend — upsert_session
            always refreshes updated_at.

    Returns:
        List[Union[Session, Dict[str, Any]]]: List of upserted sessions.
        On error the exception is logged and an empty list is returned
        (this method does not re-raise, unlike upsert_session).
    """
    if not sessions:
        return []

    try:
        log_info(f"In-memory database: processing {len(sessions)} sessions with individual upsert operations")

        results = []
        for session in sessions:
            if session is not None:
                result = self.upsert_session(session, deserialize=deserialize)
                if result is not None:
                    results.append(result)
        return results

    except Exception as e:
        log_error(f"Exception during bulk session upsert: {e}")
        return []
|
|
359
|
+
|
|
360
|
+
# -- Memory methods --
|
|
361
|
+
def delete_user_memory(self, memory_id: str, user_id: Optional[str] = None):
    """Delete a user memory from in-memory storage.

    Args:
        memory_id (str): The ID of the memory to delete.
        user_id (Optional[str]): The ID of the user. If provided, verifies the memory belongs to this user before deletion.

    Raises:
        Exception: If an error occurs during deletion.
    """
    try:
        # Build the surviving list; a record is removed only when its id matches
        # and — if a user_id was supplied — ownership matches as well.
        if user_id is None:
            survivors = [record for record in self._memories if record.get("memory_id") != memory_id]
        else:
            survivors = [
                record
                for record in self._memories
                if record.get("memory_id") != memory_id or record.get("user_id") != user_id
            ]

        removed = len(survivors) < len(self._memories)
        self._memories = survivors

        if removed:
            log_debug(f"Successfully deleted user memory id: {memory_id}")
        else:
            log_debug(f"No memory found with id: {memory_id}")

    except Exception as e:
        log_error(f"Error deleting memory: {e}")
        raise e
|
|
390
|
+
|
|
391
|
+
def delete_user_memories(self, memory_ids: List[str], user_id: Optional[str] = None) -> None:
    """Delete multiple user memories from in-memory storage.

    Args:
        memory_ids (List[str]): The IDs of the memories to delete.
        user_id (Optional[str]): The ID of the user. If provided, only deletes memories belonging to this user.

    Raises:
        Exception: If an error occurs during deletion.
    """
    try:
        def _keep(record) -> bool:
            # Records whose id is not targeted always survive; targeted records
            # survive only when a user_id was given and ownership differs.
            if record.get("memory_id") not in memory_ids:
                return True
            return user_id is not None and record.get("user_id") != user_id

        self._memories = [record for record in self._memories if _keep(record)]
        log_debug(f"Successfully deleted {len(memory_ids)} user memories")

    except Exception as e:
        log_error(f"Error deleting memories: {e}")
        raise e
|
|
414
|
+
|
|
415
|
+
def get_all_memory_topics(self) -> List[str]:
    """Get all memory topics from in-memory storage.

    Returns:
        List[str]: List of unique topics (order unspecified).

    Raises:
        Exception: If an error occurs while reading topics.
    """
    try:
        collected = set()
        for record in self._memories:
            entry_topics = record.get("topics", [])
            # Ignore malformed records whose topics field is not a list
            if isinstance(entry_topics, list):
                for topic in entry_topics:
                    collected.add(topic)
        return list(collected)

    except Exception as e:
        log_error(f"Exception reading from memory storage: {e}")
        raise e
|
|
435
|
+
|
|
436
|
+
def get_user_memory(
    self, memory_id: str, deserialize: Optional[bool] = True, user_id: Optional[str] = None
) -> Optional[Union[UserMemory, Dict[str, Any]]]:
    """Get a user memory from in-memory storage.

    Args:
        memory_id (str): The ID of the memory to retrieve.
        deserialize (Optional[bool]): Whether to deserialize the memory. Defaults to True.
        user_id (Optional[str]): The ID of the user. If provided, only returns the memory if it belongs to this user.

    Returns:
        Optional[Union[UserMemory, Dict[str, Any]]]: The memory object or dictionary, or None if not found.

    Raises:
        Exception: If an error occurs while reading the memory.
    """
    try:
        for record in self._memories:
            # Guard clauses: wrong id or (when requested) wrong owner
            if record.get("memory_id") != memory_id:
                continue
            if user_id is not None and record.get("user_id") != user_id:
                continue

            # Return a copy so callers cannot mutate the stored record
            snapshot = deepcopy(record)
            if not deserialize:
                return snapshot
            return UserMemory.from_dict(snapshot)

        return None

    except Exception as e:
        log_error(f"Exception reading from memory storage: {e}")
        raise e
|
|
469
|
+
|
|
470
|
+
def get_user_memories(
    self,
    user_id: Optional[str] = None,
    agent_id: Optional[str] = None,
    team_id: Optional[str] = None,
    topics: Optional[List[str]] = None,
    search_content: Optional[str] = None,
    limit: Optional[int] = None,
    page: Optional[int] = None,
    sort_by: Optional[str] = None,
    sort_order: Optional[str] = None,
    deserialize: Optional[bool] = True,
) -> Union[List[UserMemory], Tuple[List[Dict[str, Any]], int]]:
    """Get user memories from in-memory storage with filtering and pagination.

    Args:
        user_id (Optional[str]): Only return memories belonging to this user.
        agent_id (Optional[str]): Only return memories created by this agent.
        team_id (Optional[str]): Only return memories created by this team.
        topics (Optional[List[str]]): Keep memories matching ANY of these topics.
        search_content (Optional[str]): Case-insensitive substring match against
            the memory content.
        limit (Optional[int]): Maximum number of memories to return.
        page (Optional[int]): 1-based page number (only used together with limit).
        sort_by (Optional[str]): The field to sort the memories by.
        sort_order (Optional[str]): The order to sort the memories by.
        deserialize (Optional[bool]): Whether to deserialize the memories.

    Returns:
        Union[List[UserMemory], Tuple[List[Dict[str, Any]], int]]:
            - When deserialize=True: List of UserMemory objects
            - When deserialize=False: Tuple of (memory dicts, total filtered
              count before pagination)

    Raises:
        Exception: If an error occurs while reading the memories.
    """
    try:
        # Apply filters
        filtered_memories = []
        for memory_data in self._memories:
            if user_id is not None and memory_data.get("user_id") != user_id:
                continue
            if agent_id is not None and memory_data.get("agent_id") != agent_id:
                continue
            if team_id is not None and memory_data.get("team_id") != team_id:
                continue
            if topics is not None:
                # OR semantics: at least one requested topic must be present
                memory_topics = memory_data.get("topics", [])
                if not any(topic in memory_topics for topic in topics):
                    continue
            if search_content is not None:
                memory_content = str(memory_data.get("memory", ""))
                if search_content.lower() not in memory_content.lower():
                    continue

            filtered_memories.append(deepcopy(memory_data))

        total_count = len(filtered_memories)

        # Apply sorting
        filtered_memories = apply_sorting(filtered_memories, sort_by, sort_order)

        # Apply pagination
        if limit is not None:
            start_idx = 0
            if page is not None:
                start_idx = (page - 1) * limit
            filtered_memories = filtered_memories[start_idx : start_idx + limit]

        if not deserialize:
            return filtered_memories, total_count

        return [UserMemory.from_dict(memory) for memory in filtered_memories]

    except Exception as e:
        log_error(f"Exception reading from memory storage: {e}")
        raise e
|
|
524
|
+
|
|
525
|
+
def get_user_memory_stats(
    self, limit: Optional[int] = None, page: Optional[int] = None, user_id: Optional[str] = None
) -> Tuple[List[Dict[str, Any]], int]:
    """Get user memory statistics.

    Aggregates per-user memory counts and the most recent update timestamp,
    sorted by recency (most recently updated user first).

    Args:
        limit (Optional[int]): Maximum number of stats to return.
        page (Optional[int]): 1-based page number (only used together with limit).
        user_id (Optional[str]): User ID for filtering.

    Returns:
        Tuple[List[Dict[str, Any]], int]: List of per-user stats and the total
        count of users before pagination.

    Raises:
        Exception: If an error occurs while getting stats.
    """
    try:
        per_user: Dict[str, Dict[str, Any]] = {}

        for record in self._memories:
            owner = record.get("user_id")
            if user_id is not None and owner != user_id:
                continue
            # Memories with no (or empty) user_id are excluded from stats
            if not owner:
                continue

            entry = per_user.get(owner)
            if entry is None:
                entry = {
                    "user_id": owner,
                    "total_memories": 0,
                    "last_memory_updated_at": 0,
                }
                per_user[owner] = entry

            entry["total_memories"] += 1
            entry["last_memory_updated_at"] = max(
                entry["last_memory_updated_at"], record.get("updated_at", 0)
            )

        ordered = sorted(per_user.values(), key=lambda item: item["last_memory_updated_at"], reverse=True)
        total_count = len(ordered)

        # Apply pagination
        if limit is not None:
            offset = (page - 1) * limit if page is not None else 0
            ordered = ordered[offset : offset + limit]

        return ordered, total_count

    except Exception as e:
        log_error(f"Exception getting user memory stats: {e}")
        raise e
|
|
578
|
+
|
|
579
|
+
def upsert_user_memory(
    self, memory: UserMemory, deserialize: Optional[bool] = True
) -> Optional[Union[UserMemory, Dict[str, Any]]]:
    """Insert or update a user memory in in-memory storage.

    Args:
        memory (UserMemory): The memory to upsert. A memory_id is generated
            and assigned on the object if it is missing.
        deserialize (Optional[bool]): Whether to return a UserMemory object
            (True) or the raw memory dictionary (False).

    Returns:
        Optional[Union[UserMemory, Dict[str, Any]]]: The upserted memory.

    Raises:
        Exception: If an error occurs during the upsert.
    """
    try:
        if memory.memory_id is None:
            memory.memory_id = str(uuid4())

        # Fix: copy memory.__dict__ in the fallback path — previously the
        # attribute dict itself was used, so setting updated_at below silently
        # mutated the caller's object and the store aliased live caller state.
        memory_dict = memory.to_dict() if hasattr(memory, "to_dict") else dict(memory.__dict__)
        memory_dict["updated_at"] = int(time.time())

        # Find existing memory to update
        memory_updated = False
        for i, existing_memory in enumerate(self._memories):
            if existing_memory.get("memory_id") == memory.memory_id:
                # Store a deep copy (consistent with upsert_session) so later
                # mutations of shared nested structures cannot corrupt the store.
                self._memories[i] = deepcopy(memory_dict)
                memory_updated = True
                break

        if not memory_updated:
            self._memories.append(deepcopy(memory_dict))

        memory_dict_copy = deepcopy(memory_dict)
        if not deserialize:
            return memory_dict_copy

        return UserMemory.from_dict(memory_dict_copy)

    except Exception as e:
        log_warning(f"Exception upserting user memory: {e}")
        raise e
|
|
609
|
+
|
|
610
|
+
def upsert_memories(
    self, memories: List[UserMemory], deserialize: Optional[bool] = True, preserve_updated_at: bool = False
) -> List[Union[UserMemory, Dict[str, Any]]]:
    """
    Bulk upsert multiple user memories for improved performance on large datasets.

    Args:
        memories (List[UserMemory]): List of memories to upsert. None entries are skipped.
        deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.
        preserve_updated_at (bool): Accepted for interface compatibility.
            NOTE(review): currently unused by this backend — upsert_user_memory
            always refreshes updated_at.

    Returns:
        List[Union[UserMemory, Dict[str, Any]]]: List of upserted memories.
        On error the exception is logged and an empty list is returned
        (this method does not re-raise, unlike upsert_user_memory).
    """
    if not memories:
        return []

    try:
        log_info(f"In-memory database: processing {len(memories)} memories with individual upsert operations")
        # For in-memory database, individual upserts are actually efficient
        # since we're just manipulating Python lists and dictionaries
        results = []
        for memory in memories:
            if memory is not None:
                result = self.upsert_user_memory(memory, deserialize=deserialize)
                if result is not None:
                    results.append(result)
        return results

    except Exception as e:
        log_error(f"Exception during bulk memory upsert: {e}")
        return []
|
|
644
|
+
|
|
645
|
+
def clear_memories(self) -> None:
    """Delete all memories.

    Raises:
        Exception: If an error occurs during deletion.
    """
    try:
        # In-place wipe keeps any external references to the list consistent
        del self._memories[:]

    except Exception as e:
        log_warning(f"Exception deleting all memories: {e}")
        raise e
|
|
657
|
+
|
|
658
|
+
# -- Metrics methods --
|
|
659
|
+
def calculate_metrics(self) -> Optional[list[dict]]:
    """Calculate metrics for all dates without complete metrics.

    Determines the first date needing (re)calculation, gathers the sessions
    created in that window, computes one daily metrics record per date with
    session activity, and upserts each record into self._metrics.

    Returns:
        Optional[list[dict]]: The metrics records created or updated, or None
        when there is nothing to calculate.

    Raises:
        Exception: If an error occurs while calculating metrics.
    """
    try:
        starting_date = self._get_metrics_calculation_starting_date(self._metrics)
        if starting_date is None:
            log_info("No session data found. Won't calculate metrics.")
            return None

        dates_to_process = get_dates_to_calculate_metrics_for(starting_date)
        if not dates_to_process:
            log_info("Metrics already calculated for all relevant dates.")
            return None

        # Window spans midnight of the first date up to (exclusive) midnight
        # after the last date
        start_timestamp = int(datetime.combine(dates_to_process[0], datetime.min.time()).timestamp())
        end_timestamp = int(
            datetime.combine(dates_to_process[-1] + timedelta(days=1), datetime.min.time()).timestamp()
        )

        sessions = self._get_all_sessions_for_metrics_calculation(start_timestamp, end_timestamp)
        all_sessions_data = fetch_all_sessions_data(
            sessions=sessions, dates_to_process=dates_to_process, start_timestamp=start_timestamp
        )
        if not all_sessions_data:
            log_info("No new session data found. Won't calculate metrics.")
            return None

        results = []

        for date_to_process in dates_to_process:
            date_key = date_to_process.isoformat()
            sessions_for_date = all_sessions_data.get(date_key, {})

            # Skip dates with no sessions
            if not any(len(sessions) > 0 for sessions in sessions_for_date.values()):
                continue

            metrics_record = calculate_date_metrics(date_to_process, sessions_for_date)

            # Upsert metrics record: replace the existing daily record for this
            # date if there is one, otherwise append
            existing_record_idx = None
            for i, existing_metric in enumerate(self._metrics):
                if (
                    existing_metric.get("date") == str(date_to_process)
                    and existing_metric.get("aggregation_period") == "daily"
                ):
                    existing_record_idx = i
                    break

            if existing_record_idx is not None:
                self._metrics[existing_record_idx] = metrics_record
            else:
                self._metrics.append(metrics_record)

            results.append(metrics_record)

        log_debug("Updated metrics calculations")

        return results

    except Exception as e:
        log_warning(f"Exception refreshing metrics: {e}")
        raise e
|
|
721
|
+
|
|
722
|
+
def _get_metrics_calculation_starting_date(self, metrics: List[Dict[str, Any]]) -> Optional[date]:
|
|
723
|
+
"""Get the first date for which metrics calculation is needed."""
|
|
724
|
+
if metrics:
|
|
725
|
+
# Sort by date in descending order
|
|
726
|
+
sorted_metrics = sorted(metrics, key=lambda x: x.get("date", ""), reverse=True)
|
|
727
|
+
latest_metric = sorted_metrics[0]
|
|
728
|
+
|
|
729
|
+
if latest_metric.get("completed", False):
|
|
730
|
+
latest_date = datetime.strptime(latest_metric["date"], "%Y-%m-%d").date()
|
|
731
|
+
return latest_date + timedelta(days=1)
|
|
732
|
+
else:
|
|
733
|
+
return datetime.strptime(latest_metric["date"], "%Y-%m-%d").date()
|
|
734
|
+
|
|
735
|
+
# No metrics records. Return the date of the first recorded session.
|
|
736
|
+
if self._sessions:
|
|
737
|
+
# Sort by created_at
|
|
738
|
+
sorted_sessions = sorted(self._sessions, key=lambda x: x.get("created_at", 0))
|
|
739
|
+
first_session_date = sorted_sessions[0]["created_at"]
|
|
740
|
+
return datetime.fromtimestamp(first_session_date, tz=timezone.utc).date()
|
|
741
|
+
|
|
742
|
+
return None
|
|
743
|
+
|
|
744
|
+
def _get_all_sessions_for_metrics_calculation(
|
|
745
|
+
self, start_timestamp: Optional[int] = None, end_timestamp: Optional[int] = None
|
|
746
|
+
) -> List[Dict[str, Any]]:
|
|
747
|
+
"""Get all sessions for metrics calculation."""
|
|
748
|
+
try:
|
|
749
|
+
filtered_sessions = []
|
|
750
|
+
for session in self._sessions:
|
|
751
|
+
created_at = session.get("created_at", 0)
|
|
752
|
+
if start_timestamp is not None and created_at < start_timestamp:
|
|
753
|
+
continue
|
|
754
|
+
if end_timestamp is not None and created_at >= end_timestamp:
|
|
755
|
+
continue
|
|
756
|
+
|
|
757
|
+
# Only include necessary fields for metrics
|
|
758
|
+
filtered_session = {
|
|
759
|
+
"user_id": session.get("user_id"),
|
|
760
|
+
"session_data": deepcopy(session.get("session_data")),
|
|
761
|
+
"runs": deepcopy(session.get("runs")),
|
|
762
|
+
"created_at": session.get("created_at"),
|
|
763
|
+
"session_type": session.get("session_type"),
|
|
764
|
+
}
|
|
765
|
+
filtered_sessions.append(filtered_session)
|
|
766
|
+
|
|
767
|
+
return filtered_sessions
|
|
768
|
+
|
|
769
|
+
except Exception as e:
|
|
770
|
+
log_error(f"Exception reading sessions for metrics: {e}")
|
|
771
|
+
raise e
|
|
772
|
+
|
|
773
|
+
def get_metrics(
    self,
    starting_date: Optional[date] = None,
    ending_date: Optional[date] = None,
) -> Tuple[List[dict], Optional[int]]:
    """Get all metrics matching the given date range.

    Returns the matching metric records (deep copies) together with the most
    recent updated_at timestamp among them, or None if no record carries one.
    """
    try:
        selected: List[dict] = []
        newest_update = None

        for record in self._metrics:
            record_date = datetime.strptime(record.get("date", ""), "%Y-%m-%d").date()

            in_range = (not starting_date or record_date >= starting_date) and (
                not ending_date or record_date <= ending_date
            )
            if not in_range:
                continue

            selected.append(deepcopy(record))

            # Track the latest truthy updated_at across matching records
            stamp = record.get("updated_at")
            if stamp and (newest_update is None or stamp > newest_update):
                newest_update = stamp

        return selected, newest_update

    except Exception as e:
        log_error(f"Exception getting metrics: {e}")
        raise e
|
|
802
|
+
|
|
803
|
+
# -- Knowledge methods --
|
|
804
|
+
|
|
805
|
+
def delete_knowledge_content(self, id: str):
    """Delete a knowledge row from in-memory storage.

    Deleting a non-existent id is a silent no-op.

    Args:
        id (str): The ID of the knowledge row to delete.

    Raises:
        Exception: If an error occurs during deletion.
    """
    try:
        # Keep every row whose id differs from the target
        self._knowledge = [row for row in self._knowledge if row.get("id") != id]

    except Exception as e:
        log_error(f"Error deleting knowledge content: {e}")
        raise e
|
|
820
|
+
|
|
821
|
+
def get_knowledge_content(self, id: str) -> Optional[KnowledgeRow]:
    """Get a knowledge row from in-memory storage.

    Args:
        id (str): The ID of the knowledge row to get.

    Returns:
        Optional[KnowledgeRow]: The knowledge row, or None if it doesn't exist.

    Raises:
        Exception: If an error occurs during retrieval.
    """
    try:
        # First matching row wins; validate it into the model on the way out
        match = next((row for row in self._knowledge if row.get("id") == id), None)
        if match is None:
            return None
        return KnowledgeRow.model_validate(match)

    except Exception as e:
        log_error(f"Error getting knowledge content: {e}")
        raise e
|
|
843
|
+
|
|
844
|
+
def get_knowledge_contents(
    self,
    limit: Optional[int] = None,
    page: Optional[int] = None,
    sort_by: Optional[str] = None,
    sort_order: Optional[str] = None,
) -> Tuple[List[KnowledgeRow], int]:
    """List knowledge contents from in-memory storage with sorting and pagination.

    Args:
        limit (Optional[int]): Maximum number of rows to return.
        page (Optional[int]): 1-indexed page number; only used together with limit.
        sort_by (Optional[str]): Column to sort by.
        sort_order (Optional[str]): Sort direction.

    Returns:
        Tuple[List[KnowledgeRow], int]: The selected rows and the total row count
        (the count is taken before pagination).

    Raises:
        Exception: Propagated if retrieval fails.
    """
    try:
        # Work on deep copies so callers can't mutate stored rows.
        rows = [deepcopy(row) for row in self._knowledge]
        total = len(rows)

        # Sort before slicing so pagination is stable.
        rows = apply_sorting(rows, sort_by, sort_order)

        if limit is not None:
            offset = (page - 1) * limit if page is not None else 0
            rows = rows[offset : offset + limit]

        return [KnowledgeRow.model_validate(row) for row in rows], total
    except Exception as e:
        log_error(f"Error getting knowledge contents: {e}")
        raise e
|
|
885
|
+
|
|
886
|
+
def upsert_knowledge_content(self, knowledge_row: KnowledgeRow):
    """Insert or update a knowledge row in in-memory storage.

    An existing row with the same id is replaced in place; otherwise the row
    is appended.

    Args:
        knowledge_row (KnowledgeRow): The knowledge row to upsert.

    Returns:
        Optional[KnowledgeRow]: The upserted knowledge row.

    Raises:
        Exception: Propagated if the upsert fails.
    """
    try:
        payload = knowledge_row.model_dump()

        # Replace in place when a row with the same id already exists;
        # the for/else appends only when no match was found.
        for index, stored in enumerate(self._knowledge):
            if stored.get("id") == knowledge_row.id:
                self._knowledge[index] = payload
                break
        else:
            self._knowledge.append(payload)

        return knowledge_row
    except Exception as e:
        log_error(f"Error upserting knowledge row: {e}")
        raise e
|
|
917
|
+
|
|
918
|
+
# -- Eval methods --
|
|
919
|
+
|
|
920
|
+
def create_eval_run(self, eval_run: EvalRunRecord) -> Optional[EvalRunRecord]:
    """Store a new EvalRunRecord in in-memory storage.

    Args:
        eval_run (EvalRunRecord): The eval run to persist.

    Returns:
        Optional[EvalRunRecord]: The stored eval run.

    Raises:
        Exception: Propagated if creation fails.
    """
    try:
        now = int(time.time())
        record = eval_run.model_dump()
        # Both timestamps are stamped with the creation time.
        record["created_at"] = now
        record["updated_at"] = now

        self._eval_runs.append(record)

        log_debug(f"Created eval run with id '{eval_run.run_id}'")

        return eval_run
    except Exception as e:
        log_error(f"Error creating eval run: {e}")
        raise e
|
|
937
|
+
|
|
938
|
+
def delete_eval_runs(self, eval_run_ids: List[str]) -> None:
    """Remove every eval run whose run_id appears in the given list.

    Args:
        eval_run_ids (List[str]): run_ids of the eval runs to delete.

    Raises:
        Exception: Propagated if deletion fails.
    """
    try:
        before = len(self._eval_runs)
        self._eval_runs = [run for run in self._eval_runs if run.get("run_id") not in eval_run_ids]

        # Log how many rows were actually dropped (may be fewer than requested).
        deleted_count = before - len(self._eval_runs)
        if deleted_count:
            log_debug(f"Deleted {deleted_count} eval runs")
        else:
            log_debug(f"No eval runs found with IDs: {eval_run_ids}")
    except Exception as e:
        log_error(f"Error deleting eval runs {eval_run_ids}: {e}")
        raise e
|
|
953
|
+
|
|
954
|
+
def get_eval_run(
    self, eval_run_id: str, deserialize: Optional[bool] = True
) -> Optional[Union[EvalRunRecord, Dict[str, Any]]]:
    """Fetch a single eval run by its run_id from in-memory storage.

    Args:
        eval_run_id (str): run_id of the eval run to fetch.
        deserialize (Optional[bool]): When True return an EvalRunRecord,
            otherwise return the raw dict.

    Returns:
        Optional[Union[EvalRunRecord, Dict[str, Any]]]: The eval run, or None
        when no run matches.

    Raises:
        Exception: Propagated if retrieval fails.
    """
    try:
        match = next((run for run in self._eval_runs if run.get("run_id") == eval_run_id), None)
        if match is None:
            return None

        # Hand out a deep copy so the stored record can't be mutated.
        snapshot = deepcopy(match)
        return snapshot if not deserialize else EvalRunRecord.model_validate(snapshot)
    except Exception as e:
        log_error(f"Exception getting eval run {eval_run_id}: {e}")
        raise e
|
|
971
|
+
|
|
972
|
+
def get_eval_runs(
    self,
    limit: Optional[int] = None,
    page: Optional[int] = None,
    sort_by: Optional[str] = None,
    sort_order: Optional[str] = None,
    agent_id: Optional[str] = None,
    team_id: Optional[str] = None,
    workflow_id: Optional[str] = None,
    model_id: Optional[str] = None,
    filter_type: Optional[EvalFilterType] = None,
    eval_type: Optional[List[EvalType]] = None,
    deserialize: Optional[bool] = True,
) -> Union[List[EvalRunRecord], Tuple[List[Dict[str, Any]], int]]:
    """List eval runs from in-memory storage with filtering, sorting and pagination.

    Args:
        limit (Optional[int]): Maximum number of runs to return.
        page (Optional[int]): 1-indexed page number; only used together with limit.
        sort_by (Optional[str]): Column to sort by; defaults to created_at descending.
        sort_order (Optional[str]): Sort direction when sort_by is given.
        agent_id (Optional[str]): Keep only runs for this agent.
        team_id (Optional[str]): Keep only runs for this team.
        workflow_id (Optional[str]): Keep only runs for this workflow.
        model_id (Optional[str]): Keep only runs for this model.
        filter_type (Optional[EvalFilterType]): Keep only runs bound to the
            given component kind (agent / team / workflow).
        eval_type (Optional[List[EvalType]]): Keep only runs with one of these types.
        deserialize (Optional[bool]): When True return EvalRunRecord objects,
            otherwise the raw dicts plus the pre-pagination count.

    Returns:
        Union[List[EvalRunRecord], Tuple[List[Dict[str, Any]], int]]: The
        matching runs; when deserialize is False, a (runs, total_count) tuple.

    Raises:
        Exception: Propagated if retrieval fails.
    """
    try:
        def _matches(run: Dict[str, Any]) -> bool:
            # Exact-match filters: the run is rejected on the first mismatch.
            if agent_id is not None and run.get("agent_id") != agent_id:
                return False
            if team_id is not None and run.get("team_id") != team_id:
                return False
            if workflow_id is not None and run.get("workflow_id") != workflow_id:
                return False
            if model_id is not None and run.get("model_id") != model_id:
                return False
            if eval_type is not None and len(eval_type) > 0 and run.get("eval_type") not in eval_type:
                return False
            if filter_type is not None:
                # Component filter: require the corresponding id field to be set.
                if filter_type == EvalFilterType.AGENT and run.get("agent_id") is None:
                    return False
                if filter_type == EvalFilterType.TEAM and run.get("team_id") is None:
                    return False
                if filter_type == EvalFilterType.WORKFLOW and run.get("workflow_id") is None:
                    return False
            return True

        matching = [deepcopy(run) for run in self._eval_runs if _matches(run)]
        total_count = len(matching)

        # Default ordering is newest first by created_at.
        if sort_by is None:
            matching.sort(key=lambda run: run.get("created_at", 0), reverse=True)
        else:
            matching = apply_sorting(matching, sort_by, sort_order)

        if limit is not None:
            offset = (page - 1) * limit if page is not None else 0
            matching = matching[offset : offset + limit]

        if not deserialize:
            return matching, total_count

        return [EvalRunRecord.model_validate(run) for run in matching]
    except Exception as e:
        log_error(f"Exception getting eval runs: {e}")
        raise e
|
|
1035
|
+
|
|
1036
|
+
def rename_eval_run(
    self, eval_run_id: str, name: str, deserialize: Optional[bool] = True
) -> Optional[Union[EvalRunRecord, Dict[str, Any]]]:
    """Update the name of the eval run identified by eval_run_id.

    Args:
        eval_run_id (str): run_id of the eval run to rename.
        name (str): The new name.
        deserialize (Optional[bool]): When True return an EvalRunRecord,
            otherwise return the raw dict.

    Returns:
        Optional[Union[EvalRunRecord, Dict[str, Any]]]: The renamed run, or
        None when no run matches.

    Raises:
        Exception: Propagated if the rename fails.
    """
    try:
        for stored in self._eval_runs:
            if stored.get("run_id") != eval_run_id:
                continue

            # Mutate the stored record in place and refresh its timestamp.
            stored["name"] = name
            stored["updated_at"] = int(time.time())

            log_debug(f"Renamed eval run with id '{eval_run_id}' to '{name}'")

            snapshot = deepcopy(stored)
            return snapshot if not deserialize else EvalRunRecord.model_validate(snapshot)

        return None
    except Exception as e:
        log_error(f"Error renaming eval run {eval_run_id}: {e}")
        raise e
|
|
1060
|
+
|
|
1061
|
+
# -- Culture methods --
|
|
1062
|
+
|
|
1063
|
+
def clear_cultural_knowledge(self) -> None:
    """Drop every cultural knowledge entry held in memory.

    Raises:
        Exception: Propagated if clearing fails.
    """
    try:
        # Rebind to a fresh empty list; any previous list is left untouched.
        self._cultural_knowledge = list()
    except Exception as e:
        log_error(f"Error clearing cultural knowledge: {e}")
        raise e
|
|
1070
|
+
|
|
1071
|
+
def delete_cultural_knowledge(self, id: str) -> None:
    """Remove the cultural knowledge entry with the given id, if present.

    Args:
        id (str): Identifier of the entry to remove.

    Raises:
        Exception: Propagated if deletion fails.
    """
    try:
        kept = [entry for entry in self._cultural_knowledge if entry.get("id") != id]
        self._cultural_knowledge = kept
    except Exception as e:
        log_error(f"Error deleting cultural knowledge: {e}")
        raise e
|
|
1078
|
+
|
|
1079
|
+
def get_cultural_knowledge(
    self, id: str, deserialize: Optional[bool] = True
) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
    """Fetch a single cultural knowledge entry by id from in-memory storage.

    Args:
        id (str): Identifier of the entry to fetch.
        deserialize (Optional[bool]): When True return a CulturalKnowledge
            object, otherwise the raw dict.

    Returns:
        Optional[Union[CulturalKnowledge, Dict[str, Any]]]: The entry, or None
        when no entry matches.

    Raises:
        Exception: Propagated if retrieval fails.
    """
    try:
        found = next((entry for entry in self._cultural_knowledge if entry.get("id") == id), None)
        if found is None:
            return None

        # Hand out a deep copy so the stored entry can't be mutated.
        snapshot = deepcopy(found)
        if deserialize:
            return deserialize_cultural_knowledge_from_db(snapshot)
        return snapshot
    except Exception as e:
        log_error(f"Error getting cultural knowledge: {e}")
        raise e
|
|
1094
|
+
|
|
1095
|
+
def get_all_cultural_knowledge(
    self,
    name: Optional[str] = None,
    agent_id: Optional[str] = None,
    team_id: Optional[str] = None,
    limit: Optional[int] = None,
    page: Optional[int] = None,
    sort_by: Optional[str] = None,
    sort_order: Optional[str] = None,
    deserialize: Optional[bool] = True,
) -> Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]:
    """List cultural knowledge entries, optionally filtered, sorted and paginated.

    Args:
        name (Optional[str]): Keep only entries with this name.
        agent_id (Optional[str]): Keep only entries for this agent.
        team_id (Optional[str]): Keep only entries for this team.
        limit (Optional[int]): Maximum number of entries to return.
        page (Optional[int]): 1-indexed page number; only used together with limit.
        sort_by (Optional[str]): Column to sort by; no sorting when omitted.
        sort_order (Optional[str]): Sort direction when sort_by is given.
        deserialize (Optional[bool]): When True return CulturalKnowledge
            objects, otherwise the raw dicts plus the pre-pagination count.

    Returns:
        Union[List[CulturalKnowledge], Tuple[List[Dict[str, Any]], int]]: The
        matching entries; when deserialize is False, a (rows, total_count) tuple.

    Raises:
        Exception: Propagated if retrieval fails.
    """
    try:
        # A truthy filter must match exactly; falsy filters are ignored.
        selected = [
            entry
            for entry in self._cultural_knowledge
            if (not name or entry.get("name") == name)
            and (not agent_id or entry.get("agent_id") == agent_id)
            and (not team_id or entry.get("team_id") == team_id)
        ]

        if sort_by:
            selected = apply_sorting(selected, sort_by, sort_order)

        # Count reflects filtering but not pagination.
        total_count = len(selected)

        if limit and page:
            offset = (page - 1) * limit
            selected = selected[offset : offset + limit]
        elif limit:
            selected = selected[:limit]

        if not deserialize:
            return [deepcopy(entry) for entry in selected], total_count

        return [deserialize_cultural_knowledge_from_db(deepcopy(entry)) for entry in selected]
    except Exception as e:
        log_error(f"Error getting all cultural knowledge: {e}")
        raise e
|
|
1138
|
+
|
|
1139
|
+
def upsert_cultural_knowledge(
    self, cultural_knowledge: CulturalKnowledge, deserialize: Optional[bool] = True
) -> Optional[Union[CulturalKnowledge, Dict[str, Any]]]:
    """Insert or replace a cultural knowledge entry in in-memory storage.

    Args:
        cultural_knowledge (CulturalKnowledge): The entry to upsert; a UUID id
            is generated when the entry has none.
        deserialize (Optional[bool]): When True return a CulturalKnowledge
            object, otherwise the raw stored dict.

    Returns:
        Optional[Union[CulturalKnowledge, Dict[str, Any]]]: The stored entry,
        re-read from storage.

    Raises:
        Exception: Propagated if the upsert fails.
    """
    try:
        # Make sure the entry has a stable identifier before storing it.
        if not cultural_knowledge.id:
            cultural_knowledge.id = str(uuid4())

        # Content, categories, and notes are flattened into one dict for storage.
        serialized_content = serialize_cultural_knowledge_for_db(cultural_knowledge)

        record = {
            "id": cultural_knowledge.id,
            "name": cultural_knowledge.name,
            "summary": cultural_knowledge.summary,
            "content": serialized_content if serialized_content else None,
            "metadata": cultural_knowledge.metadata,
            "input": cultural_knowledge.input,
            "created_at": cultural_knowledge.created_at,
            "updated_at": int(time.time()),
            "agent_id": cultural_knowledge.agent_id,
            "team_id": cultural_knowledge.team_id,
        }

        # Replace semantics: drop any entry with the same id, then append.
        self._cultural_knowledge = [
            entry for entry in self._cultural_knowledge if entry.get("id") != cultural_knowledge.id
        ]
        self._cultural_knowledge.append(record)

        # Re-read through the getter so the caller gets a detached copy.
        return self.get_cultural_knowledge(cultural_knowledge.id, deserialize=deserialize)
    except Exception as e:
        log_error(f"Error upserting cultural knowledge: {e}")
        raise e
|
|
1174
|
+
|
|
1175
|
+
# --- Traces ---
|
|
1176
|
+
def upsert_trace(self, trace: "Trace") -> None:
    """Create or update a single trace record in the database.

    Args:
        trace: The Trace object to store (one per trace_id).

    Raises:
        NotImplementedError: Trace persistence is not implemented by this backend.
    """
    raise NotImplementedError
|
|
1183
|
+
|
|
1184
|
+
def get_trace(
    self,
    trace_id: Optional[str] = None,
    run_id: Optional[str] = None,
) -> Optional["Trace"]:
    """Get a single trace by trace_id or other filters.

    Args:
        trace_id: The unique trace identifier.
        run_id: Filter by run ID (returns first match).

    Returns:
        Optional[Trace]: The trace if found, None otherwise.

    Note:
        If multiple filters are provided, trace_id takes precedence.
        For other filters, the most recent trace is returned.

    Raises:
        NotImplementedError: Trace retrieval is not implemented by this backend.
    """
    raise NotImplementedError
|
|
1203
|
+
|
|
1204
|
+
def get_traces(
    self,
    run_id: Optional[str] = None,
    session_id: Optional[str] = None,
    user_id: Optional[str] = None,
    agent_id: Optional[str] = None,
    team_id: Optional[str] = None,
    workflow_id: Optional[str] = None,
    status: Optional[str] = None,
    start_time: Optional[datetime] = None,
    end_time: Optional[datetime] = None,
    limit: Optional[int] = 20,
    page: Optional[int] = 1,
) -> tuple[List, int]:
    """Get traces matching the provided filters.

    Args:
        run_id: Filter by run ID.
        session_id: Filter by session ID.
        user_id: Filter by user ID.
        agent_id: Filter by agent ID.
        team_id: Filter by team ID.
        workflow_id: Filter by workflow ID.
        status: Filter by status (OK, ERROR, UNSET).
        start_time: Filter traces starting after this datetime.
        end_time: Filter traces ending before this datetime.
        limit: Maximum number of traces to return per page.
        page: Page number (1-indexed).

    Returns:
        tuple[List[Trace], int]: Tuple of (list of matching traces, total count).

    Raises:
        NotImplementedError: Trace listing is not implemented by this backend.
    """
    raise NotImplementedError
|
|
1237
|
+
|
|
1238
|
+
def get_trace_stats(
    self,
    user_id: Optional[str] = None,
    agent_id: Optional[str] = None,
    team_id: Optional[str] = None,
    workflow_id: Optional[str] = None,
    start_time: Optional[datetime] = None,
    end_time: Optional[datetime] = None,
    limit: Optional[int] = 20,
    page: Optional[int] = 1,
) -> tuple[List[Dict[str, Any]], int]:
    """Get trace statistics grouped by session.

    Args:
        user_id: Filter by user ID.
        agent_id: Filter by agent ID.
        team_id: Filter by team ID.
        workflow_id: Filter by workflow ID.
        start_time: Filter sessions with traces created after this datetime.
        end_time: Filter sessions with traces created before this datetime.
        limit: Maximum number of sessions to return per page.
        page: Page number (1-indexed).

    Returns:
        tuple[List[Dict], int]: Tuple of (list of session stats dicts, total count).
        Each dict contains: session_id, user_id, agent_id, team_id, workflow_id, total_traces,
        first_trace_at, last_trace_at.

    Raises:
        NotImplementedError: Trace statistics are not implemented by this backend.
    """
    raise NotImplementedError
|
|
1267
|
+
|
|
1268
|
+
# --- Spans ---
|
|
1269
|
+
def create_span(self, span: "Span") -> None:
    """Create a single span in the database.

    Args:
        span: The Span object to store.

    Raises:
        NotImplementedError: Span persistence is not implemented by this backend.
    """
    raise NotImplementedError
|
|
1276
|
+
|
|
1277
|
+
def create_spans(self, spans: List["Span"]) -> None:
    """Create multiple spans in the database as a batch.

    Args:
        spans: List of Span objects to store.

    Raises:
        NotImplementedError: Span persistence is not implemented by this backend.
    """
    raise NotImplementedError
|
|
1284
|
+
|
|
1285
|
+
def get_span(self, span_id: str) -> Optional["Span"]:
    """Get a single span by its span_id.

    Args:
        span_id: The unique span identifier.

    Returns:
        Optional[Span]: The span if found, None otherwise.

    Raises:
        NotImplementedError: Span retrieval is not implemented by this backend.
    """
    raise NotImplementedError
|
|
1295
|
+
|
|
1296
|
+
def get_spans(
    self,
    trace_id: Optional[str] = None,
    parent_span_id: Optional[str] = None,
    limit: Optional[int] = 1000,
) -> List["Span"]:
    """Get spans matching the provided filters.

    Args:
        trace_id: Filter by trace ID.
        parent_span_id: Filter by parent span ID.
        limit: Maximum number of spans to return.

    Returns:
        List[Span]: List of matching spans.

    Raises:
        NotImplementedError: Span listing is not implemented by this backend.
    """
    raise NotImplementedError
|