agno 1.8.0__py3-none-any.whl → 2.0.0a1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/__init__.py +8 -0
- agno/agent/__init__.py +19 -27
- agno/agent/agent.py +2781 -4126
- agno/api/agent.py +9 -65
- agno/api/api.py +5 -46
- agno/api/evals.py +6 -17
- agno/api/os.py +17 -0
- agno/api/routes.py +6 -41
- agno/api/schemas/__init__.py +9 -0
- agno/api/schemas/agent.py +5 -21
- agno/api/schemas/evals.py +7 -16
- agno/api/schemas/os.py +14 -0
- agno/api/schemas/team.py +5 -21
- agno/api/schemas/utils.py +21 -0
- agno/api/schemas/workflows.py +11 -7
- agno/api/settings.py +53 -0
- agno/api/team.py +9 -64
- agno/api/workflow.py +28 -0
- agno/cloud/aws/base.py +214 -0
- agno/cloud/aws/s3/__init__.py +2 -0
- agno/cloud/aws/s3/api_client.py +43 -0
- agno/cloud/aws/s3/bucket.py +195 -0
- agno/cloud/aws/s3/object.py +57 -0
- agno/db/__init__.py +24 -0
- agno/db/base.py +245 -0
- agno/db/dynamo/__init__.py +3 -0
- agno/db/dynamo/dynamo.py +1749 -0
- agno/db/dynamo/schemas.py +278 -0
- agno/db/dynamo/utils.py +684 -0
- agno/db/firestore/__init__.py +3 -0
- agno/db/firestore/firestore.py +1438 -0
- agno/db/firestore/schemas.py +130 -0
- agno/db/firestore/utils.py +278 -0
- agno/db/gcs_json/__init__.py +3 -0
- agno/db/gcs_json/gcs_json_db.py +1001 -0
- agno/db/gcs_json/utils.py +194 -0
- agno/db/in_memory/__init__.py +3 -0
- agno/db/in_memory/in_memory_db.py +888 -0
- agno/db/in_memory/utils.py +172 -0
- agno/db/json/__init__.py +3 -0
- agno/db/json/json_db.py +1051 -0
- agno/db/json/utils.py +196 -0
- agno/db/migrations/v1_to_v2.py +162 -0
- agno/db/mongo/__init__.py +3 -0
- agno/db/mongo/mongo.py +1417 -0
- agno/db/mongo/schemas.py +77 -0
- agno/db/mongo/utils.py +204 -0
- agno/db/mysql/__init__.py +3 -0
- agno/db/mysql/mysql.py +1719 -0
- agno/db/mysql/schemas.py +124 -0
- agno/db/mysql/utils.py +298 -0
- agno/db/postgres/__init__.py +3 -0
- agno/db/postgres/postgres.py +1720 -0
- agno/db/postgres/schemas.py +124 -0
- agno/db/postgres/utils.py +281 -0
- agno/db/redis/__init__.py +3 -0
- agno/db/redis/redis.py +1371 -0
- agno/db/redis/schemas.py +109 -0
- agno/db/redis/utils.py +288 -0
- agno/db/schemas/__init__.py +3 -0
- agno/db/schemas/evals.py +33 -0
- agno/db/schemas/knowledge.py +40 -0
- agno/db/schemas/memory.py +46 -0
- agno/db/singlestore/__init__.py +3 -0
- agno/db/singlestore/schemas.py +116 -0
- agno/db/singlestore/singlestore.py +1722 -0
- agno/db/singlestore/utils.py +327 -0
- agno/db/sqlite/__init__.py +3 -0
- agno/db/sqlite/schemas.py +119 -0
- agno/db/sqlite/sqlite.py +1680 -0
- agno/db/sqlite/utils.py +269 -0
- agno/db/utils.py +88 -0
- agno/eval/__init__.py +14 -0
- agno/eval/accuracy.py +142 -43
- agno/eval/performance.py +88 -23
- agno/eval/reliability.py +73 -20
- agno/eval/utils.py +23 -13
- agno/integrations/discord/__init__.py +3 -0
- agno/{app → integrations}/discord/client.py +10 -10
- agno/knowledge/__init__.py +2 -2
- agno/{document → knowledge}/chunking/agentic.py +2 -2
- agno/{document → knowledge}/chunking/document.py +2 -2
- agno/{document → knowledge}/chunking/fixed.py +3 -3
- agno/{document → knowledge}/chunking/markdown.py +2 -2
- agno/{document → knowledge}/chunking/recursive.py +2 -2
- agno/{document → knowledge}/chunking/row.py +2 -2
- agno/knowledge/chunking/semantic.py +59 -0
- agno/knowledge/chunking/strategy.py +121 -0
- agno/knowledge/content.py +74 -0
- agno/knowledge/document/__init__.py +5 -0
- agno/{document → knowledge/document}/base.py +12 -2
- agno/knowledge/embedder/__init__.py +5 -0
- agno/{embedder → knowledge/embedder}/aws_bedrock.py +127 -1
- agno/{embedder → knowledge/embedder}/azure_openai.py +65 -1
- agno/{embedder → knowledge/embedder}/base.py +6 -0
- agno/{embedder → knowledge/embedder}/cohere.py +72 -1
- agno/{embedder → knowledge/embedder}/fastembed.py +17 -1
- agno/{embedder → knowledge/embedder}/fireworks.py +1 -1
- agno/{embedder → knowledge/embedder}/google.py +74 -1
- agno/{embedder → knowledge/embedder}/huggingface.py +36 -2
- agno/{embedder → knowledge/embedder}/jina.py +48 -2
- agno/knowledge/embedder/langdb.py +22 -0
- agno/knowledge/embedder/mistral.py +139 -0
- agno/{embedder → knowledge/embedder}/nebius.py +1 -1
- agno/{embedder → knowledge/embedder}/ollama.py +54 -3
- agno/knowledge/embedder/openai.py +223 -0
- agno/{embedder → knowledge/embedder}/sentence_transformer.py +16 -1
- agno/{embedder → knowledge/embedder}/together.py +1 -1
- agno/{embedder → knowledge/embedder}/voyageai.py +49 -1
- agno/knowledge/knowledge.py +1515 -0
- agno/knowledge/reader/__init__.py +7 -0
- agno/{document → knowledge}/reader/arxiv_reader.py +32 -4
- agno/knowledge/reader/base.py +88 -0
- agno/{document → knowledge}/reader/csv_reader.py +68 -15
- agno/knowledge/reader/docx_reader.py +83 -0
- agno/{document → knowledge}/reader/firecrawl_reader.py +42 -21
- agno/knowledge/reader/gcs_reader.py +67 -0
- agno/{document → knowledge}/reader/json_reader.py +30 -9
- agno/{document → knowledge}/reader/markdown_reader.py +36 -9
- agno/{document → knowledge}/reader/pdf_reader.py +79 -21
- agno/knowledge/reader/reader_factory.py +275 -0
- agno/knowledge/reader/s3_reader.py +171 -0
- agno/{document → knowledge}/reader/text_reader.py +31 -10
- agno/knowledge/reader/url_reader.py +84 -0
- agno/knowledge/reader/web_search_reader.py +389 -0
- agno/{document → knowledge}/reader/website_reader.py +37 -10
- agno/knowledge/reader/wikipedia_reader.py +59 -0
- agno/knowledge/reader/youtube_reader.py +78 -0
- agno/knowledge/remote_content/remote_content.py +88 -0
- agno/{reranker → knowledge/reranker}/base.py +1 -1
- agno/{reranker → knowledge/reranker}/cohere.py +2 -2
- agno/{reranker → knowledge/reranker}/infinity.py +2 -2
- agno/{reranker → knowledge/reranker}/sentence_transformer.py +2 -2
- agno/knowledge/types.py +30 -0
- agno/knowledge/utils.py +169 -0
- agno/media.py +2 -2
- agno/memory/__init__.py +2 -10
- agno/memory/manager.py +1003 -148
- agno/models/aimlapi/__init__.py +2 -2
- agno/models/aimlapi/aimlapi.py +6 -6
- agno/models/anthropic/claude.py +129 -82
- agno/models/aws/bedrock.py +107 -175
- agno/models/aws/claude.py +64 -18
- agno/models/azure/ai_foundry.py +73 -23
- agno/models/base.py +347 -287
- agno/models/cerebras/cerebras.py +84 -27
- agno/models/cohere/chat.py +106 -98
- agno/models/dashscope/dashscope.py +14 -5
- agno/models/google/gemini.py +123 -53
- agno/models/groq/groq.py +97 -35
- agno/models/huggingface/huggingface.py +92 -27
- agno/models/ibm/watsonx.py +72 -13
- agno/models/litellm/chat.py +85 -13
- agno/models/message.py +38 -144
- agno/models/meta/llama.py +85 -49
- agno/models/metrics.py +120 -0
- agno/models/mistral/mistral.py +90 -21
- agno/models/ollama/__init__.py +0 -2
- agno/models/ollama/chat.py +84 -46
- agno/models/openai/chat.py +135 -27
- agno/models/openai/responses.py +233 -115
- agno/models/perplexity/perplexity.py +26 -2
- agno/models/portkey/portkey.py +0 -7
- agno/models/response.py +14 -8
- agno/models/utils.py +20 -0
- agno/models/vercel/__init__.py +2 -2
- agno/models/vercel/v0.py +1 -1
- agno/models/vllm/__init__.py +2 -2
- agno/models/vllm/vllm.py +3 -3
- agno/models/xai/xai.py +10 -10
- agno/os/__init__.py +3 -0
- agno/os/app.py +393 -0
- agno/os/auth.py +47 -0
- agno/os/config.py +103 -0
- agno/os/interfaces/agui/__init__.py +3 -0
- agno/os/interfaces/agui/agui.py +31 -0
- agno/{app/agui/async_router.py → os/interfaces/agui/router.py} +16 -16
- agno/{app → os/interfaces}/agui/utils.py +65 -28
- agno/os/interfaces/base.py +21 -0
- agno/os/interfaces/slack/__init__.py +3 -0
- agno/{app/slack/async_router.py → os/interfaces/slack/router.py} +3 -5
- agno/os/interfaces/slack/slack.py +33 -0
- agno/os/interfaces/whatsapp/__init__.py +3 -0
- agno/{app/whatsapp/async_router.py → os/interfaces/whatsapp/router.py} +4 -7
- agno/os/interfaces/whatsapp/whatsapp.py +30 -0
- agno/os/router.py +843 -0
- agno/os/routers/__init__.py +3 -0
- agno/os/routers/evals/__init__.py +3 -0
- agno/os/routers/evals/evals.py +204 -0
- agno/os/routers/evals/schemas.py +142 -0
- agno/os/routers/evals/utils.py +161 -0
- agno/os/routers/knowledge/__init__.py +3 -0
- agno/os/routers/knowledge/knowledge.py +413 -0
- agno/os/routers/knowledge/schemas.py +118 -0
- agno/os/routers/memory/__init__.py +3 -0
- agno/os/routers/memory/memory.py +179 -0
- agno/os/routers/memory/schemas.py +58 -0
- agno/os/routers/metrics/__init__.py +3 -0
- agno/os/routers/metrics/metrics.py +58 -0
- agno/os/routers/metrics/schemas.py +47 -0
- agno/os/routers/session/__init__.py +3 -0
- agno/os/routers/session/session.py +163 -0
- agno/os/schema.py +892 -0
- agno/{app/playground → os}/settings.py +8 -15
- agno/os/utils.py +270 -0
- agno/reasoning/azure_ai_foundry.py +4 -4
- agno/reasoning/deepseek.py +4 -4
- agno/reasoning/default.py +6 -11
- agno/reasoning/groq.py +4 -4
- agno/reasoning/helpers.py +4 -6
- agno/reasoning/ollama.py +4 -4
- agno/reasoning/openai.py +4 -4
- agno/run/{response.py → agent.py} +144 -72
- agno/run/base.py +44 -58
- agno/run/cancel.py +83 -0
- agno/run/team.py +133 -77
- agno/run/workflow.py +537 -12
- agno/session/__init__.py +10 -0
- agno/session/agent.py +244 -0
- agno/session/summary.py +225 -0
- agno/session/team.py +262 -0
- agno/{storage/session/v2 → session}/workflow.py +47 -24
- agno/team/__init__.py +15 -16
- agno/team/team.py +2967 -4243
- agno/tools/agentql.py +14 -5
- agno/tools/airflow.py +9 -4
- agno/tools/api.py +7 -3
- agno/tools/apify.py +2 -46
- agno/tools/arxiv.py +8 -3
- agno/tools/aws_lambda.py +7 -5
- agno/tools/aws_ses.py +7 -1
- agno/tools/baidusearch.py +4 -1
- agno/tools/bitbucket.py +4 -4
- agno/tools/brandfetch.py +14 -11
- agno/tools/bravesearch.py +4 -1
- agno/tools/brightdata.py +42 -22
- agno/tools/browserbase.py +13 -4
- agno/tools/calcom.py +12 -10
- agno/tools/calculator.py +10 -27
- agno/tools/cartesia.py +18 -13
- agno/tools/{clickup_tool.py → clickup.py} +12 -25
- agno/tools/confluence.py +71 -18
- agno/tools/crawl4ai.py +7 -1
- agno/tools/csv_toolkit.py +9 -8
- agno/tools/dalle.py +18 -11
- agno/tools/daytona.py +13 -16
- agno/tools/decorator.py +6 -3
- agno/tools/desi_vocal.py +16 -7
- agno/tools/discord.py +11 -8
- agno/tools/docker.py +30 -42
- agno/tools/duckdb.py +34 -53
- agno/tools/duckduckgo.py +8 -7
- agno/tools/e2b.py +62 -62
- agno/tools/eleven_labs.py +35 -28
- agno/tools/email.py +4 -1
- agno/tools/evm.py +7 -1
- agno/tools/exa.py +19 -14
- agno/tools/fal.py +29 -29
- agno/tools/file.py +9 -8
- agno/tools/financial_datasets.py +25 -44
- agno/tools/firecrawl.py +22 -22
- agno/tools/function.py +68 -17
- agno/tools/giphy.py +22 -10
- agno/tools/github.py +48 -126
- agno/tools/gmail.py +46 -62
- agno/tools/google_bigquery.py +7 -6
- agno/tools/google_maps.py +11 -26
- agno/tools/googlesearch.py +7 -2
- agno/tools/googlesheets.py +21 -17
- agno/tools/hackernews.py +9 -5
- agno/tools/jina.py +5 -4
- agno/tools/jira.py +18 -9
- agno/tools/knowledge.py +31 -32
- agno/tools/linear.py +18 -33
- agno/tools/linkup.py +5 -1
- agno/tools/local_file_system.py +8 -5
- agno/tools/lumalab.py +31 -19
- agno/tools/mem0.py +18 -12
- agno/tools/memori.py +14 -10
- agno/tools/mlx_transcribe.py +3 -2
- agno/tools/models/azure_openai.py +32 -14
- agno/tools/models/gemini.py +58 -31
- agno/tools/models/groq.py +29 -20
- agno/tools/models/nebius.py +27 -11
- agno/tools/models_labs.py +39 -15
- agno/tools/moviepy_video.py +7 -6
- agno/tools/neo4j.py +134 -0
- agno/tools/newspaper.py +7 -2
- agno/tools/newspaper4k.py +8 -3
- agno/tools/openai.py +57 -26
- agno/tools/openbb.py +12 -11
- agno/tools/opencv.py +62 -46
- agno/tools/openweather.py +14 -12
- agno/tools/pandas.py +11 -3
- agno/tools/postgres.py +4 -12
- agno/tools/pubmed.py +4 -1
- agno/tools/python.py +9 -22
- agno/tools/reasoning.py +35 -27
- agno/tools/reddit.py +11 -26
- agno/tools/replicate.py +54 -41
- agno/tools/resend.py +4 -1
- agno/tools/scrapegraph.py +15 -14
- agno/tools/searxng.py +10 -23
- agno/tools/serpapi.py +6 -3
- agno/tools/serper.py +13 -4
- agno/tools/shell.py +9 -2
- agno/tools/slack.py +12 -11
- agno/tools/sleep.py +3 -2
- agno/tools/spider.py +24 -4
- agno/tools/sql.py +7 -6
- agno/tools/tavily.py +6 -4
- agno/tools/telegram.py +12 -4
- agno/tools/todoist.py +11 -31
- agno/tools/toolkit.py +1 -1
- agno/tools/trafilatura.py +22 -6
- agno/tools/trello.py +9 -22
- agno/tools/twilio.py +10 -3
- agno/tools/user_control_flow.py +6 -1
- agno/tools/valyu.py +34 -5
- agno/tools/visualization.py +19 -28
- agno/tools/webbrowser.py +4 -3
- agno/tools/webex.py +11 -7
- agno/tools/website.py +15 -46
- agno/tools/webtools.py +12 -4
- agno/tools/whatsapp.py +5 -9
- agno/tools/wikipedia.py +20 -13
- agno/tools/x.py +14 -13
- agno/tools/yfinance.py +13 -40
- agno/tools/youtube.py +26 -20
- agno/tools/zendesk.py +7 -2
- agno/tools/zep.py +10 -7
- agno/tools/zoom.py +10 -9
- agno/utils/common.py +1 -19
- agno/utils/events.py +95 -118
- agno/utils/knowledge.py +29 -0
- agno/utils/location.py +2 -2
- agno/utils/log.py +2 -2
- agno/utils/mcp.py +11 -5
- agno/utils/media.py +39 -0
- agno/utils/message.py +12 -1
- agno/utils/models/claude.py +6 -4
- agno/utils/models/mistral.py +8 -7
- agno/utils/models/schema_utils.py +3 -3
- agno/utils/pprint.py +33 -32
- agno/utils/print_response/agent.py +779 -0
- agno/utils/print_response/team.py +1565 -0
- agno/utils/print_response/workflow.py +1451 -0
- agno/utils/prompts.py +14 -14
- agno/utils/reasoning.py +87 -0
- agno/utils/response.py +42 -42
- agno/utils/string.py +8 -22
- agno/utils/team.py +50 -0
- agno/utils/timer.py +2 -2
- agno/vectordb/base.py +33 -21
- agno/vectordb/cassandra/cassandra.py +287 -23
- agno/vectordb/chroma/chromadb.py +482 -59
- agno/vectordb/clickhouse/clickhousedb.py +270 -63
- agno/vectordb/couchbase/couchbase.py +309 -29
- agno/vectordb/lancedb/lance_db.py +360 -21
- agno/vectordb/langchaindb/__init__.py +5 -0
- agno/vectordb/langchaindb/langchaindb.py +145 -0
- agno/vectordb/lightrag/__init__.py +5 -0
- agno/vectordb/lightrag/lightrag.py +374 -0
- agno/vectordb/llamaindex/llamaindexdb.py +127 -0
- agno/vectordb/milvus/milvus.py +242 -32
- agno/vectordb/mongodb/mongodb.py +200 -24
- agno/vectordb/pgvector/pgvector.py +319 -37
- agno/vectordb/pineconedb/pineconedb.py +221 -27
- agno/vectordb/qdrant/qdrant.py +356 -14
- agno/vectordb/singlestore/singlestore.py +286 -29
- agno/vectordb/surrealdb/surrealdb.py +187 -7
- agno/vectordb/upstashdb/upstashdb.py +342 -26
- agno/vectordb/weaviate/weaviate.py +227 -165
- agno/workflow/__init__.py +17 -13
- agno/workflow/{v2/condition.py → condition.py} +135 -32
- agno/workflow/{v2/loop.py → loop.py} +115 -28
- agno/workflow/{v2/parallel.py → parallel.py} +138 -108
- agno/workflow/{v2/router.py → router.py} +133 -32
- agno/workflow/{v2/step.py → step.py} +200 -42
- agno/workflow/{v2/steps.py → steps.py} +147 -66
- agno/workflow/types.py +482 -0
- agno/workflow/workflow.py +2394 -696
- agno-2.0.0a1.dist-info/METADATA +355 -0
- agno-2.0.0a1.dist-info/RECORD +514 -0
- agno/agent/metrics.py +0 -107
- agno/api/app.py +0 -35
- agno/api/playground.py +0 -92
- agno/api/schemas/app.py +0 -12
- agno/api/schemas/playground.py +0 -22
- agno/api/schemas/user.py +0 -35
- agno/api/schemas/workspace.py +0 -46
- agno/api/user.py +0 -160
- agno/api/workflows.py +0 -33
- agno/api/workspace.py +0 -175
- agno/app/agui/__init__.py +0 -3
- agno/app/agui/app.py +0 -17
- agno/app/agui/sync_router.py +0 -120
- agno/app/base.py +0 -186
- agno/app/discord/__init__.py +0 -3
- agno/app/fastapi/__init__.py +0 -3
- agno/app/fastapi/app.py +0 -107
- agno/app/fastapi/async_router.py +0 -457
- agno/app/fastapi/sync_router.py +0 -448
- agno/app/playground/app.py +0 -228
- agno/app/playground/async_router.py +0 -1050
- agno/app/playground/deploy.py +0 -249
- agno/app/playground/operator.py +0 -183
- agno/app/playground/schemas.py +0 -220
- agno/app/playground/serve.py +0 -55
- agno/app/playground/sync_router.py +0 -1042
- agno/app/playground/utils.py +0 -46
- agno/app/settings.py +0 -15
- agno/app/slack/__init__.py +0 -3
- agno/app/slack/app.py +0 -19
- agno/app/slack/sync_router.py +0 -92
- agno/app/utils.py +0 -54
- agno/app/whatsapp/__init__.py +0 -3
- agno/app/whatsapp/app.py +0 -15
- agno/app/whatsapp/sync_router.py +0 -197
- agno/cli/auth_server.py +0 -249
- agno/cli/config.py +0 -274
- agno/cli/console.py +0 -88
- agno/cli/credentials.py +0 -23
- agno/cli/entrypoint.py +0 -571
- agno/cli/operator.py +0 -357
- agno/cli/settings.py +0 -96
- agno/cli/ws/ws_cli.py +0 -817
- agno/constants.py +0 -13
- agno/document/__init__.py +0 -5
- agno/document/chunking/semantic.py +0 -45
- agno/document/chunking/strategy.py +0 -31
- agno/document/reader/__init__.py +0 -5
- agno/document/reader/base.py +0 -47
- agno/document/reader/docx_reader.py +0 -60
- agno/document/reader/gcs/pdf_reader.py +0 -44
- agno/document/reader/s3/pdf_reader.py +0 -59
- agno/document/reader/s3/text_reader.py +0 -63
- agno/document/reader/url_reader.py +0 -59
- agno/document/reader/youtube_reader.py +0 -58
- agno/embedder/__init__.py +0 -5
- agno/embedder/langdb.py +0 -80
- agno/embedder/mistral.py +0 -82
- agno/embedder/openai.py +0 -78
- agno/file/__init__.py +0 -5
- agno/file/file.py +0 -16
- agno/file/local/csv.py +0 -32
- agno/file/local/txt.py +0 -19
- agno/infra/app.py +0 -240
- agno/infra/base.py +0 -144
- agno/infra/context.py +0 -20
- agno/infra/db_app.py +0 -52
- agno/infra/resource.py +0 -205
- agno/infra/resources.py +0 -55
- agno/knowledge/agent.py +0 -698
- agno/knowledge/arxiv.py +0 -33
- agno/knowledge/combined.py +0 -36
- agno/knowledge/csv.py +0 -144
- agno/knowledge/csv_url.py +0 -124
- agno/knowledge/document.py +0 -223
- agno/knowledge/docx.py +0 -137
- agno/knowledge/firecrawl.py +0 -34
- agno/knowledge/gcs/__init__.py +0 -0
- agno/knowledge/gcs/base.py +0 -39
- agno/knowledge/gcs/pdf.py +0 -125
- agno/knowledge/json.py +0 -137
- agno/knowledge/langchain.py +0 -71
- agno/knowledge/light_rag.py +0 -273
- agno/knowledge/llamaindex.py +0 -66
- agno/knowledge/markdown.py +0 -154
- agno/knowledge/pdf.py +0 -164
- agno/knowledge/pdf_bytes.py +0 -42
- agno/knowledge/pdf_url.py +0 -148
- agno/knowledge/s3/__init__.py +0 -0
- agno/knowledge/s3/base.py +0 -64
- agno/knowledge/s3/pdf.py +0 -33
- agno/knowledge/s3/text.py +0 -34
- agno/knowledge/text.py +0 -141
- agno/knowledge/url.py +0 -46
- agno/knowledge/website.py +0 -179
- agno/knowledge/wikipedia.py +0 -32
- agno/knowledge/youtube.py +0 -35
- agno/memory/agent.py +0 -423
- agno/memory/classifier.py +0 -104
- agno/memory/db/__init__.py +0 -5
- agno/memory/db/base.py +0 -42
- agno/memory/db/mongodb.py +0 -189
- agno/memory/db/postgres.py +0 -203
- agno/memory/db/sqlite.py +0 -193
- agno/memory/memory.py +0 -22
- agno/memory/row.py +0 -36
- agno/memory/summarizer.py +0 -201
- agno/memory/summary.py +0 -19
- agno/memory/team.py +0 -415
- agno/memory/v2/__init__.py +0 -2
- agno/memory/v2/db/__init__.py +0 -1
- agno/memory/v2/db/base.py +0 -42
- agno/memory/v2/db/firestore.py +0 -339
- agno/memory/v2/db/mongodb.py +0 -196
- agno/memory/v2/db/postgres.py +0 -214
- agno/memory/v2/db/redis.py +0 -187
- agno/memory/v2/db/schema.py +0 -54
- agno/memory/v2/db/sqlite.py +0 -209
- agno/memory/v2/manager.py +0 -437
- agno/memory/v2/memory.py +0 -1097
- agno/memory/v2/schema.py +0 -55
- agno/memory/v2/summarizer.py +0 -215
- agno/memory/workflow.py +0 -38
- agno/models/ollama/tools.py +0 -430
- agno/models/qwen/__init__.py +0 -5
- agno/playground/__init__.py +0 -10
- agno/playground/deploy.py +0 -3
- agno/playground/playground.py +0 -3
- agno/playground/serve.py +0 -3
- agno/playground/settings.py +0 -3
- agno/reranker/__init__.py +0 -0
- agno/run/v2/__init__.py +0 -0
- agno/run/v2/workflow.py +0 -567
- agno/storage/__init__.py +0 -0
- agno/storage/agent/__init__.py +0 -0
- agno/storage/agent/dynamodb.py +0 -1
- agno/storage/agent/json.py +0 -1
- agno/storage/agent/mongodb.py +0 -1
- agno/storage/agent/postgres.py +0 -1
- agno/storage/agent/singlestore.py +0 -1
- agno/storage/agent/sqlite.py +0 -1
- agno/storage/agent/yaml.py +0 -1
- agno/storage/base.py +0 -60
- agno/storage/dynamodb.py +0 -673
- agno/storage/firestore.py +0 -297
- agno/storage/gcs_json.py +0 -261
- agno/storage/in_memory.py +0 -234
- agno/storage/json.py +0 -237
- agno/storage/mongodb.py +0 -328
- agno/storage/mysql.py +0 -685
- agno/storage/postgres.py +0 -682
- agno/storage/redis.py +0 -336
- agno/storage/session/__init__.py +0 -16
- agno/storage/session/agent.py +0 -64
- agno/storage/session/team.py +0 -63
- agno/storage/session/v2/__init__.py +0 -5
- agno/storage/session/workflow.py +0 -61
- agno/storage/singlestore.py +0 -606
- agno/storage/sqlite.py +0 -646
- agno/storage/workflow/__init__.py +0 -0
- agno/storage/workflow/mongodb.py +0 -1
- agno/storage/workflow/postgres.py +0 -1
- agno/storage/workflow/sqlite.py +0 -1
- agno/storage/yaml.py +0 -241
- agno/tools/thinking.py +0 -73
- agno/utils/defaults.py +0 -57
- agno/utils/filesystem.py +0 -39
- agno/utils/git.py +0 -52
- agno/utils/json_io.py +0 -30
- agno/utils/load_env.py +0 -19
- agno/utils/py_io.py +0 -19
- agno/utils/pyproject.py +0 -18
- agno/utils/resource_filter.py +0 -31
- agno/workflow/v2/__init__.py +0 -21
- agno/workflow/v2/types.py +0 -357
- agno/workflow/v2/workflow.py +0 -3312
- agno/workspace/__init__.py +0 -0
- agno/workspace/config.py +0 -325
- agno/workspace/enums.py +0 -6
- agno/workspace/helpers.py +0 -52
- agno/workspace/operator.py +0 -757
- agno/workspace/settings.py +0 -158
- agno-1.8.0.dist-info/METADATA +0 -979
- agno-1.8.0.dist-info/RECORD +0 -565
- agno-1.8.0.dist-info/entry_points.txt +0 -3
- /agno/{app → db/migrations}/__init__.py +0 -0
- /agno/{app/playground/__init__.py → db/schemas/metrics.py} +0 -0
- /agno/{cli → integrations}/__init__.py +0 -0
- /agno/{cli/ws → knowledge/chunking}/__init__.py +0 -0
- /agno/{document/chunking → knowledge/remote_content}/__init__.py +0 -0
- /agno/{document/reader/gcs → knowledge/reranker}/__init__.py +0 -0
- /agno/{document/reader/s3 → os/interfaces}/__init__.py +0 -0
- /agno/{app → os/interfaces}/slack/security.py +0 -0
- /agno/{app → os/interfaces}/whatsapp/security.py +0 -0
- /agno/{file/local → utils/print_response}/__init__.py +0 -0
- /agno/{infra → vectordb/llamaindex}/__init__.py +0 -0
- {agno-1.8.0.dist-info → agno-2.0.0a1.dist-info}/WHEEL +0 -0
- {agno-1.8.0.dist-info → agno-2.0.0a1.dist-info}/licenses/LICENSE +0 -0
- {agno-1.8.0.dist-info → agno-2.0.0a1.dist-info}/top_level.txt +0 -0
agno/db/sqlite/sqlite.py
ADDED
|
@@ -0,0 +1,1680 @@
|
|
|
1
|
+
import time
|
|
2
|
+
from datetime import date, datetime, timedelta, timezone
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from typing import Any, Dict, List, Optional, Sequence, Tuple, Union
|
|
5
|
+
from uuid import uuid4
|
|
6
|
+
|
|
7
|
+
from agno.db.base import BaseDb, SessionType
|
|
8
|
+
from agno.db.schemas.evals import EvalFilterType, EvalRunRecord, EvalType
|
|
9
|
+
from agno.db.schemas.knowledge import KnowledgeRow
|
|
10
|
+
from agno.db.schemas.memory import UserMemory
|
|
11
|
+
from agno.db.sqlite.schemas import get_table_schema_definition
|
|
12
|
+
from agno.db.sqlite.utils import (
|
|
13
|
+
apply_sorting,
|
|
14
|
+
bulk_upsert_metrics,
|
|
15
|
+
calculate_date_metrics,
|
|
16
|
+
fetch_all_sessions_data,
|
|
17
|
+
get_dates_to_calculate_metrics_for,
|
|
18
|
+
is_table_available,
|
|
19
|
+
is_valid_table,
|
|
20
|
+
)
|
|
21
|
+
from agno.db.utils import deserialize_session_json_fields, serialize_session_json_fields
|
|
22
|
+
from agno.session import AgentSession, Session, TeamSession, WorkflowSession
|
|
23
|
+
from agno.utils.log import log_debug, log_error, log_info, log_warning
|
|
24
|
+
|
|
25
|
+
try:
|
|
26
|
+
from sqlalchemy import Column, MetaData, Table, and_, func, select, text, update
|
|
27
|
+
from sqlalchemy.dialects import sqlite
|
|
28
|
+
from sqlalchemy.engine import Engine, create_engine
|
|
29
|
+
from sqlalchemy.orm import scoped_session, sessionmaker
|
|
30
|
+
from sqlalchemy.schema import Index, UniqueConstraint
|
|
31
|
+
except ImportError:
|
|
32
|
+
raise ImportError("`sqlalchemy` not installed. Please install it using `pip install sqlalchemy`")
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class SqliteDb(BaseDb):
|
|
36
|
+
def __init__(
|
|
37
|
+
self,
|
|
38
|
+
db_engine: Optional[Engine] = None,
|
|
39
|
+
db_url: Optional[str] = None,
|
|
40
|
+
db_file: Optional[str] = None,
|
|
41
|
+
session_table: Optional[str] = None,
|
|
42
|
+
memory_table: Optional[str] = None,
|
|
43
|
+
metrics_table: Optional[str] = None,
|
|
44
|
+
eval_table: Optional[str] = None,
|
|
45
|
+
knowledge_table: Optional[str] = None,
|
|
46
|
+
):
|
|
47
|
+
"""
|
|
48
|
+
Interface for interacting with a SQLite database.
|
|
49
|
+
|
|
50
|
+
The following order is used to determine the database connection:
|
|
51
|
+
1. Use the db_engine
|
|
52
|
+
2. Use the db_url
|
|
53
|
+
3. Use the db_file
|
|
54
|
+
4. Create a new database in the current directory
|
|
55
|
+
|
|
56
|
+
Args:
|
|
57
|
+
db_engine (Optional[Engine]): The SQLAlchemy database engine to use.
|
|
58
|
+
db_url (Optional[str]): The database URL to connect to.
|
|
59
|
+
db_file (Optional[str]): The database file to connect to.
|
|
60
|
+
session_table (Optional[str]): Name of the table to store Agent, Team and Workflow sessions.
|
|
61
|
+
memory_table (Optional[str]): Name of the table to store user memories.
|
|
62
|
+
metrics_table (Optional[str]): Name of the table to store metrics.
|
|
63
|
+
eval_table (Optional[str]): Name of the table to store evaluation runs data.
|
|
64
|
+
knowledge_table (Optional[str]): Name of the table to store knowledge documents data.
|
|
65
|
+
|
|
66
|
+
Raises:
|
|
67
|
+
ValueError: If none of the tables are provided.
|
|
68
|
+
"""
|
|
69
|
+
super().__init__(
|
|
70
|
+
session_table=session_table,
|
|
71
|
+
memory_table=memory_table,
|
|
72
|
+
metrics_table=metrics_table,
|
|
73
|
+
eval_table=eval_table,
|
|
74
|
+
knowledge_table=knowledge_table,
|
|
75
|
+
)
|
|
76
|
+
|
|
77
|
+
_engine: Optional[Engine] = db_engine
|
|
78
|
+
if _engine is None:
|
|
79
|
+
if db_url is not None:
|
|
80
|
+
_engine = create_engine(db_url)
|
|
81
|
+
elif db_file is not None:
|
|
82
|
+
db_path = Path(db_file).resolve()
|
|
83
|
+
db_path.parent.mkdir(parents=True, exist_ok=True)
|
|
84
|
+
db_file = str(db_path)
|
|
85
|
+
_engine = create_engine(f"sqlite:///{db_path}")
|
|
86
|
+
else:
|
|
87
|
+
# If none of db_engine, db_url, or db_file are provided, create a db in the current directory
|
|
88
|
+
default_db_path = Path("./agno.db").resolve()
|
|
89
|
+
_engine = create_engine(f"sqlite:///{default_db_path}")
|
|
90
|
+
db_file = str(default_db_path)
|
|
91
|
+
log_debug(f"Created SQLite database: {default_db_path}")
|
|
92
|
+
|
|
93
|
+
self.db_engine: Engine = _engine
|
|
94
|
+
self.db_url: Optional[str] = db_url
|
|
95
|
+
self.db_file: Optional[str] = db_file
|
|
96
|
+
self.metadata: MetaData = MetaData()
|
|
97
|
+
|
|
98
|
+
# Initialize database session
|
|
99
|
+
self.Session: scoped_session = scoped_session(sessionmaker(bind=self.db_engine))
|
|
100
|
+
|
|
101
|
+
# -- DB methods --
|
|
102
|
+
|
|
103
|
+
def _create_table(self, table_name: str, table_type: str) -> Table:
|
|
104
|
+
"""
|
|
105
|
+
Create a table with the appropriate schema based on the table type.
|
|
106
|
+
|
|
107
|
+
Args:
|
|
108
|
+
table_name (str): Name of the table to create
|
|
109
|
+
table_type (str): Type of table (used to get schema definition)
|
|
110
|
+
|
|
111
|
+
Returns:
|
|
112
|
+
Table: SQLAlchemy Table object
|
|
113
|
+
"""
|
|
114
|
+
try:
|
|
115
|
+
table_schema = get_table_schema_definition(table_type)
|
|
116
|
+
log_debug(f"Creating table {table_name} with schema: {table_schema}")
|
|
117
|
+
|
|
118
|
+
columns: List[Column] = []
|
|
119
|
+
indexes: List[str] = []
|
|
120
|
+
unique_constraints: List[str] = []
|
|
121
|
+
schema_unique_constraints = table_schema.pop("_unique_constraints", [])
|
|
122
|
+
|
|
123
|
+
# Get the columns, indexes, and unique constraints from the table schema
|
|
124
|
+
for col_name, col_config in table_schema.items():
|
|
125
|
+
column_args = [col_name, col_config["type"]()]
|
|
126
|
+
column_kwargs = {}
|
|
127
|
+
|
|
128
|
+
if col_config.get("primary_key", False):
|
|
129
|
+
column_kwargs["primary_key"] = True
|
|
130
|
+
if "nullable" in col_config:
|
|
131
|
+
column_kwargs["nullable"] = col_config["nullable"]
|
|
132
|
+
if col_config.get("index", False):
|
|
133
|
+
indexes.append(col_name)
|
|
134
|
+
if col_config.get("unique", False):
|
|
135
|
+
column_kwargs["unique"] = True
|
|
136
|
+
unique_constraints.append(col_name)
|
|
137
|
+
|
|
138
|
+
columns.append(Column(*column_args, **column_kwargs)) # type: ignore
|
|
139
|
+
|
|
140
|
+
# Create the table object
|
|
141
|
+
table_metadata = MetaData()
|
|
142
|
+
table = Table(table_name, table_metadata, *columns)
|
|
143
|
+
|
|
144
|
+
# Add multi-column unique constraints with table-specific names
|
|
145
|
+
for constraint in schema_unique_constraints:
|
|
146
|
+
constraint_name = f"{table_name}_{constraint['name']}"
|
|
147
|
+
constraint_columns = constraint["columns"]
|
|
148
|
+
table.append_constraint(UniqueConstraint(*constraint_columns, name=constraint_name))
|
|
149
|
+
|
|
150
|
+
# Add indexes to the table definition
|
|
151
|
+
for idx_col in indexes:
|
|
152
|
+
idx_name = f"idx_{table_name}_{idx_col}"
|
|
153
|
+
table.append_constraint(Index(idx_name, idx_col))
|
|
154
|
+
|
|
155
|
+
# Create table
|
|
156
|
+
table.create(self.db_engine, checkfirst=True)
|
|
157
|
+
|
|
158
|
+
# Create indexes
|
|
159
|
+
for idx in table.indexes:
|
|
160
|
+
try:
|
|
161
|
+
log_debug(f"Creating index: {idx.name}")
|
|
162
|
+
# Check if index already exists
|
|
163
|
+
with self.Session() as sess:
|
|
164
|
+
exists_query = text("SELECT 1 FROM sqlite_master WHERE type = 'index' AND name = :index_name")
|
|
165
|
+
exists = sess.execute(exists_query, {"index_name": idx.name}).scalar() is not None
|
|
166
|
+
if exists:
|
|
167
|
+
log_debug(f"Index {idx.name} already exists in table {table_name}, skipping creation")
|
|
168
|
+
continue
|
|
169
|
+
|
|
170
|
+
idx.create(self.db_engine)
|
|
171
|
+
|
|
172
|
+
except Exception as e:
|
|
173
|
+
log_warning(f"Error creating index {idx.name}: {e}")
|
|
174
|
+
|
|
175
|
+
log_info(f"Successfully created table '{table_name}'")
|
|
176
|
+
return table
|
|
177
|
+
|
|
178
|
+
except Exception as e:
|
|
179
|
+
log_error(f"Could not create table '{table_name}': {e}")
|
|
180
|
+
raise
|
|
181
|
+
|
|
182
|
+
def _get_table(self, table_type: str, create_table_if_not_found: Optional[bool] = False) -> Optional[Table]:
|
|
183
|
+
if table_type == "sessions":
|
|
184
|
+
self.session_table = self._get_or_create_table(
|
|
185
|
+
table_name=self.session_table_name,
|
|
186
|
+
table_type=table_type,
|
|
187
|
+
create_table_if_not_found=create_table_if_not_found,
|
|
188
|
+
)
|
|
189
|
+
return self.session_table
|
|
190
|
+
|
|
191
|
+
elif table_type == "memories":
|
|
192
|
+
self.memory_table = self._get_or_create_table(
|
|
193
|
+
table_name=self.memory_table_name,
|
|
194
|
+
table_type="memories",
|
|
195
|
+
create_table_if_not_found=create_table_if_not_found,
|
|
196
|
+
)
|
|
197
|
+
return self.memory_table
|
|
198
|
+
|
|
199
|
+
elif table_type == "metrics":
|
|
200
|
+
self.metrics_table = self._get_or_create_table(
|
|
201
|
+
table_name=self.metrics_table_name,
|
|
202
|
+
table_type="metrics",
|
|
203
|
+
create_table_if_not_found=create_table_if_not_found,
|
|
204
|
+
)
|
|
205
|
+
return self.metrics_table
|
|
206
|
+
|
|
207
|
+
elif table_type == "evals":
|
|
208
|
+
self.eval_table = self._get_or_create_table(
|
|
209
|
+
table_name=self.eval_table_name,
|
|
210
|
+
table_type="evals",
|
|
211
|
+
create_table_if_not_found=create_table_if_not_found,
|
|
212
|
+
)
|
|
213
|
+
|
|
214
|
+
return self.eval_table
|
|
215
|
+
|
|
216
|
+
elif table_type == "knowledge":
|
|
217
|
+
self.knowledge_table = self._get_or_create_table(
|
|
218
|
+
table_name=self.knowledge_table_name,
|
|
219
|
+
table_type="knowledge",
|
|
220
|
+
create_table_if_not_found=create_table_if_not_found,
|
|
221
|
+
)
|
|
222
|
+
return self.knowledge_table
|
|
223
|
+
|
|
224
|
+
else:
|
|
225
|
+
raise ValueError(f"Unknown table type: '{table_type}'")
|
|
226
|
+
|
|
227
|
+
def _get_or_create_table(
    self, table_name: str, table_type: str, create_table_if_not_found: Optional[bool] = False
) -> Optional[Table]:
    """
    Check if the table exists and is valid, else create it.

    Args:
        table_name (str): Name of the table to get or create
        table_type (str): Type of table (used to get schema definition)
        create_table_if_not_found (Optional[bool]): Whether to create the table
            when it does not exist. Defaults to False.

    Returns:
        Table: SQLAlchemy Table object, or None when the table is missing and
            create_table_if_not_found is False.

    Raises:
        ValueError: If an existing table fails schema validation.
    """
    with self.Session() as sess, sess.begin():
        table_is_available = is_table_available(session=sess, table_name=table_name)

        if not table_is_available:
            # Missing table: either bail out or create it from the type's schema.
            if not create_table_if_not_found:
                return None
            return self._create_table(table_name=table_name, table_type=table_type)

        # SQLite version of table validation (no schema)
        if not is_valid_table(db_engine=self.db_engine, table_name=table_name, table_type=table_type):
            raise ValueError(f"Table {table_name} has an invalid schema")

        try:
            # Reflect the existing table definition from the database into metadata.
            table = Table(table_name, self.metadata, autoload_with=self.db_engine)
            log_debug(f"Loaded existing table {table_name}")
            return table

        except Exception as e:
            log_error(f"Error loading existing table {table_name}: {e}")
            raise
|
|
260
|
+
|
|
261
|
+
# -- Session methods --
|
|
262
|
+
|
|
263
|
+
def delete_session(self, session_id: str) -> bool:
    """
    Delete a session from the database.

    Args:
        session_id (str): ID of the session to delete

    Returns:
        bool: True if a session was deleted, False otherwise.

    Raises:
        Exception: If an error occurs during deletion.
    """
    try:
        table = self._get_table(table_type="sessions")
        if table is None:
            return False

        with self.Session() as sess, sess.begin():
            delete_stmt = table.delete().where(table.c.session_id == session_id)
            result = sess.execute(delete_stmt)
            if result.rowcount == 0:
                # No row matched the given session_id.
                # Fix: previous message read "deletewith" (missing space).
                log_debug(f"No session found to delete with session_id: {session_id}")
                return False
            else:
                log_debug(f"Successfully deleted session with session_id: {session_id}")
                return True

    except Exception as e:
        log_error(f"Error deleting session: {e}")
        return False
|
|
291
|
+
|
|
292
|
+
def delete_sessions(self, session_ids: List[str]) -> None:
    """Delete all given sessions from the database.

    Can handle multiple session types in the same run.

    Args:
        session_ids (List[str]): The IDs of the sessions to delete.

    Raises:
        Exception: If an error occurs during deletion.
    """
    try:
        sessions_table = self._get_table(table_type="sessions")
        if sessions_table is None:
            return

        with self.Session() as sess, sess.begin():
            # Single bulk DELETE over all requested session ids.
            outcome = sess.execute(
                sessions_table.delete().where(sessions_table.c.session_id.in_(session_ids))
            )

            log_debug(f"Successfully deleted {outcome.rowcount} sessions")

    except Exception as e:
        log_error(f"Error deleting sessions: {e}")
|
|
315
|
+
|
|
316
|
+
def get_session(
    self,
    session_id: str,
    session_type: SessionType,
    user_id: Optional[str] = None,
    deserialize: Optional[bool] = True,
) -> Optional[Union[Session, Dict[str, Any]]]:
    """
    Read a session from the database.

    Args:
        session_id (str): ID of the session to read.
        session_type (SessionType): Type of session to read.
        user_id (Optional[str]): User ID to filter by. Defaults to None.
        deserialize (Optional[bool]): Whether to deserialize the session. Defaults to True.

    Returns:
        Optional[Union[Session, Dict[str, Any]]]:
            - When deserialize=True: Session object
            - When deserialize=False: Session dictionary

    Raises:
        Exception: If an error occurs during retrieval.
    """
    try:
        table = self._get_table(table_type="sessions")
        if table is None:
            return None

        with self.Session() as sess, sess.begin():
            stmt = select(table).where(table.c.session_id == session_id)

            # Filtering
            if user_id is not None:
                stmt = stmt.where(table.c.user_id == user_id)
            if session_type is not None:
                # Fix: compare against the stored string value (the column is
                # written with SessionType.<X>.value in upsert_session), matching
                # the filter used in get_sessions. Comparing against the enum
                # object itself binds the wrong parameter type.
                stmt = stmt.where(table.c.session_type == session_type.value)

            result = sess.execute(stmt).fetchone()
            if result is None:
                return None

            session_raw = deserialize_session_json_fields(dict(result._mapping))
            if not session_raw or not deserialize:
                return session_raw

            # Deserialize into the concrete Session subclass for the given type.
            if session_type == SessionType.AGENT:
                return AgentSession.from_dict(session_raw)
            elif session_type == SessionType.TEAM:
                return TeamSession.from_dict(session_raw)
            elif session_type == SessionType.WORKFLOW:
                return WorkflowSession.from_dict(session_raw)
            else:
                raise ValueError(f"Invalid session type: {session_type}")

    except Exception as e:
        log_debug(f"Exception reading from sessions table: {e}")
        return None
|
|
374
|
+
|
|
375
|
+
def get_sessions(
    self,
    session_type: Optional[SessionType] = None,
    user_id: Optional[str] = None,
    component_id: Optional[str] = None,
    session_name: Optional[str] = None,
    start_timestamp: Optional[int] = None,
    end_timestamp: Optional[int] = None,
    limit: Optional[int] = None,
    page: Optional[int] = None,
    sort_by: Optional[str] = None,
    sort_order: Optional[str] = None,
    deserialize: Optional[bool] = True,
) -> Union[List[Session], Tuple[List[Dict[str, Any]], int]]:
    """
    Get all sessions in the given table. Can filter by user_id and entity_id.

    Args:
        session_type (Optional[SessionType]): The type of session to get.
        user_id (Optional[str]): The ID of the user to filter by.
        component_id (Optional[str]): The ID of the agent / team / workflow to filter by.
        session_name (Optional[str]): Substring to match against the session name.
        start_timestamp (Optional[int]): Lower bound (inclusive) on created_at.
        end_timestamp (Optional[int]): Upper bound (inclusive) on created_at.
        limit (Optional[int]): The maximum number of sessions to return. Defaults to None.
        page (Optional[int]): The page number to return. Defaults to None.
        sort_by (Optional[str]): The field to sort by. Defaults to None.
        sort_order (Optional[str]): The sort order. Defaults to None.
        deserialize (Optional[bool]): Whether to deserialize the sessions. Defaults to True.

    Returns:
        Union[List[Session], Tuple[List[Dict[str, Any]], int]]:
            - When deserialize=True: List of Session objects matching the criteria.
            - When deserialize=False: Tuple of (session dictionaries, total count).

    Raises:
        Exception: If an error occurs during retrieval.
    """
    try:
        table = self._get_table(table_type="sessions")
        if table is None:
            return [] if deserialize else ([], 0)

        with self.Session() as sess, sess.begin():
            stmt = select(table)

            # Filtering
            if user_id is not None:
                stmt = stmt.where(table.c.user_id == user_id)
            if component_id is not None:
                # The column holding the component id depends on the session type.
                if session_type == SessionType.AGENT:
                    stmt = stmt.where(table.c.agent_id == component_id)
                elif session_type == SessionType.TEAM:
                    stmt = stmt.where(table.c.team_id == component_id)
                elif session_type == SessionType.WORKFLOW:
                    stmt = stmt.where(table.c.workflow_id == component_id)
            if start_timestamp is not None:
                stmt = stmt.where(table.c.created_at >= start_timestamp)
            if end_timestamp is not None:
                stmt = stmt.where(table.c.created_at <= end_timestamp)
            if session_name is not None:
                # session_name lives inside the session_data JSON blob.
                stmt = stmt.where(
                    func.coalesce(func.json_extract(table.c.session_data, "$.session_name"), "").like(
                        f"%{session_name}%"
                    )
                )
            if session_type is not None:
                stmt = stmt.where(table.c.session_type == session_type.value)

            # Getting total count (before sorting/pagination)
            count_stmt = select(func.count()).select_from(stmt.alias())
            total_count = sess.execute(count_stmt).scalar()

            # Sorting
            stmt = apply_sorting(stmt, table, sort_by, sort_order)

            # Paginating
            if limit is not None:
                stmt = stmt.limit(limit)
            if page is not None:
                stmt = stmt.offset((page - 1) * limit)

            records = sess.execute(stmt).fetchall()
            # Fix: fetchall() never returns None; check emptiness instead, and keep
            # the return shape consistent with `deserialize` (previously an empty
            # result with deserialize=True leaked a tuple instead of a list).
            if not records:
                return [] if deserialize else ([], 0)

            sessions_raw = [deserialize_session_json_fields(dict(record._mapping)) for record in records]
            if not deserialize:
                return sessions_raw, total_count

            if session_type == SessionType.AGENT:
                return [AgentSession.from_dict(record) for record in sessions_raw]  # type: ignore
            elif session_type == SessionType.TEAM:
                return [TeamSession.from_dict(record) for record in sessions_raw]  # type: ignore
            elif session_type == SessionType.WORKFLOW:
                return [WorkflowSession.from_dict(record) for record in sessions_raw]  # type: ignore
            else:
                raise ValueError(f"Invalid session type: {session_type}")

    except Exception as e:
        log_debug(f"Exception reading from sessions table: {e}")
        # Fix: honor the documented return shape when deserialize=False.
        return [] if deserialize else ([], 0)
|
|
477
|
+
|
|
478
|
+
def rename_session(
    self, session_id: str, session_type: SessionType, session_name: str, deserialize: Optional[bool] = True
) -> Optional[Union[Session, Dict[str, Any]]]:
    """Rename a session in the database.

    Args:
        session_id (str): The ID of the session to rename.
        session_type (SessionType): The type of session to rename.
        session_name (str): The new name for the session.
        deserialize (Optional[bool]): Whether to deserialize the session. Defaults to True.

    Returns:
        Optional[Union[Session, Dict[str, Any]]]:
            - When deserialize=True: Session object
            - When deserialize=False: Session dictionary

    Raises:
        Exception: If an error occurs during renaming.
    """
    try:
        sessions_table = self._get_table(table_type="sessions")
        if sessions_table is None:
            return None

        with self.Session() as sess, sess.begin():
            # The name lives inside the session_data JSON blob, so patch it
            # in place with json_set rather than overwriting the whole column.
            rename_stmt = (
                update(sessions_table)
                .where(sessions_table.c.session_id == session_id)
                .values(
                    session_data=func.json_set(sessions_table.c.session_data, "$.session_name", session_name)
                )
            )
            rename_result = sess.execute(rename_stmt)

            # rowcount == 0 means no session matched the given id.
            if rename_result.rowcount == 0:
                return None

            # Read back the freshly updated row.
            fetch_stmt = select(sessions_table).where(sessions_table.c.session_id == session_id)
            updated_row = sess.execute(fetch_stmt).fetchone()
            if not updated_row:
                return None

            session_raw = deserialize_session_json_fields(dict(updated_row._mapping))
            if not session_raw or not deserialize:
                return session_raw

            # Deserialize into the concrete Session subclass for the given type.
            if session_type == SessionType.AGENT:
                return AgentSession.from_dict(session_raw)
            if session_type == SessionType.TEAM:
                return TeamSession.from_dict(session_raw)
            if session_type == SessionType.WORKFLOW:
                return WorkflowSession.from_dict(session_raw)
            raise ValueError(f"Invalid session type: {session_type}")

    except Exception as e:
        log_error(f"Exception renaming session: {e}")
        return None
|
|
540
|
+
|
|
541
|
+
def upsert_session(
    self, session: Session, deserialize: Optional[bool] = True
) -> Optional[Union[Session, Dict[str, Any]]]:
    """
    Insert or update a session in the database.

    The concrete branch taken depends on the runtime type of `session`
    (AgentSession, TeamSession, or anything else — treated as a workflow
    session). All branches upsert on the session_id primary key.

    Args:
        session (Session): The session data to upsert.
        deserialize (Optional[bool]): Whether to deserialize the session. Defaults to True.

    Returns:
        Optional[Session]:
            - When deserialize=True: Session object
            - When deserialize=False: Session dictionary

    Raises:
        Exception: If an error occurs during upserting.
    """
    try:
        # Sessions table is created on demand for writes.
        table = self._get_table(table_type="sessions", create_table_if_not_found=True)
        if table is None:
            return None

        # JSON-typed fields are serialized to strings before writing.
        serialized_session = serialize_session_json_fields(session.to_dict())

        if isinstance(session, AgentSession):
            with self.Session() as sess, sess.begin():
                # NOTE(review): on insert, updated_at is seeded from created_at
                # here (and for TeamSession below), while the workflow branch
                # uses updated_at with a time.time() fallback — confirm this
                # asymmetry is intended.
                stmt = sqlite.insert(table).values(
                    session_id=serialized_session.get("session_id"),
                    session_type=SessionType.AGENT.value,
                    agent_id=serialized_session.get("agent_id"),
                    user_id=serialized_session.get("user_id"),
                    agent_data=serialized_session.get("agent_data"),
                    session_data=serialized_session.get("session_data"),
                    metadata=serialized_session.get("metadata"),
                    runs=serialized_session.get("runs"),
                    summary=serialized_session.get("summary"),
                    created_at=serialized_session.get("created_at"),
                    updated_at=serialized_session.get("created_at"),
                )
                # On conflict: refresh all mutable columns; updated_at gets "now".
                stmt = stmt.on_conflict_do_update(
                    index_elements=["session_id"],
                    set_=dict(
                        agent_id=serialized_session.get("agent_id"),
                        user_id=serialized_session.get("user_id"),
                        runs=serialized_session.get("runs"),
                        summary=serialized_session.get("summary"),
                        agent_data=serialized_session.get("agent_data"),
                        session_data=serialized_session.get("session_data"),
                        metadata=serialized_session.get("metadata"),
                        updated_at=int(time.time()),
                    ),
                )
                # RETURNING gives us the row as stored, insert or update alike.
                stmt = stmt.returning(*table.columns)  # type: ignore
                result = sess.execute(stmt)
                row = result.fetchone()

                session_raw = deserialize_session_json_fields(dict(row._mapping)) if row else None
                if session_raw is None or not deserialize:
                    return session_raw
                return AgentSession.from_dict(session_raw)

        elif isinstance(session, TeamSession):
            with self.Session() as sess, sess.begin():
                stmt = sqlite.insert(table).values(
                    session_id=serialized_session.get("session_id"),
                    session_type=SessionType.TEAM.value,
                    team_id=serialized_session.get("team_id"),
                    user_id=serialized_session.get("user_id"),
                    runs=serialized_session.get("runs"),
                    summary=serialized_session.get("summary"),
                    created_at=serialized_session.get("created_at"),
                    updated_at=serialized_session.get("created_at"),
                    team_data=serialized_session.get("team_data"),
                    session_data=serialized_session.get("session_data"),
                    metadata=serialized_session.get("metadata"),
                )

                stmt = stmt.on_conflict_do_update(
                    index_elements=["session_id"],
                    set_=dict(
                        team_id=serialized_session.get("team_id"),
                        user_id=serialized_session.get("user_id"),
                        summary=serialized_session.get("summary"),
                        runs=serialized_session.get("runs"),
                        team_data=serialized_session.get("team_data"),
                        session_data=serialized_session.get("session_data"),
                        metadata=serialized_session.get("metadata"),
                        updated_at=int(time.time()),
                    ),
                )
                stmt = stmt.returning(*table.columns)  # type: ignore
                result = sess.execute(stmt)
                row = result.fetchone()

                session_raw = deserialize_session_json_fields(dict(row._mapping)) if row else None
                if session_raw is None or not deserialize:
                    return session_raw
                return TeamSession.from_dict(session_raw)

        else:
            # Any other Session subtype is persisted as a workflow session.
            with self.Session() as sess, sess.begin():
                stmt = sqlite.insert(table).values(
                    session_id=serialized_session.get("session_id"),
                    session_type=SessionType.WORKFLOW.value,
                    workflow_id=serialized_session.get("workflow_id"),
                    user_id=serialized_session.get("user_id"),
                    runs=serialized_session.get("runs"),
                    summary=serialized_session.get("summary"),
                    # Unlike the branches above, this one falls back to "now"
                    # when the session carries no timestamps.
                    created_at=serialized_session.get("created_at") or int(time.time()),
                    updated_at=serialized_session.get("updated_at") or int(time.time()),
                    workflow_data=serialized_session.get("workflow_data"),
                    session_data=serialized_session.get("session_data"),
                    metadata=serialized_session.get("metadata"),
                )
                stmt = stmt.on_conflict_do_update(
                    index_elements=["session_id"],
                    set_=dict(
                        workflow_id=serialized_session.get("workflow_id"),
                        user_id=serialized_session.get("user_id"),
                        summary=serialized_session.get("summary"),
                        runs=serialized_session.get("runs"),
                        workflow_data=serialized_session.get("workflow_data"),
                        session_data=serialized_session.get("session_data"),
                        metadata=serialized_session.get("metadata"),
                        updated_at=int(time.time()),
                    ),
                )
                stmt = stmt.returning(*table.columns)  # type: ignore
                result = sess.execute(stmt)
                row = result.fetchone()

                session_raw = deserialize_session_json_fields(dict(row._mapping)) if row else None
                if session_raw is None or not deserialize:
                    return session_raw
                return WorkflowSession.from_dict(session_raw)

    except Exception as e:
        log_warning(f"Exception upserting into table: {e}")
        return None
|
|
681
|
+
|
|
682
|
+
# -- Memory methods --
|
|
683
|
+
|
|
684
|
+
def delete_user_memory(self, memory_id: str):
    """Delete a user memory from the database.

    Args:
        memory_id (str): The ID of the memory to delete.

    Returns:
        bool: True if deletion was successful, False otherwise.

    Raises:
        Exception: If an error occurs during deletion.
    """
    try:
        table = self._get_table(table_type="memories")
        if table is None:
            return False

        with self.Session() as sess, sess.begin():
            delete_stmt = table.delete().where(table.c.memory_id == memory_id)
            result = sess.execute(delete_stmt)

            success = result.rowcount > 0
            if success:
                log_debug(f"Successfully deleted user memory id: {memory_id}")
            else:
                log_debug(f"No user memory found with id: {memory_id}")
            # Fix: the docstring promises a bool, but `success` was computed and
            # then dropped — return it so callers can act on the outcome.
            return success

    except Exception as e:
        log_error(f"Error deleting user memory: {e}")
        return False
|
|
710
|
+
|
|
711
|
+
def delete_user_memories(self, memory_ids: List[str]) -> None:
    """Delete user memories from the database.

    Args:
        memory_ids (List[str]): The IDs of the memories to delete.

    Raises:
        Exception: If an error occurs during deletion.
    """
    try:
        memories_table = self._get_table(table_type="memories")
        if memories_table is None:
            return

        with self.Session() as sess, sess.begin():
            # Single bulk DELETE over all requested memory ids.
            outcome = sess.execute(
                memories_table.delete().where(memories_table.c.memory_id.in_(memory_ids))
            )
            if outcome.rowcount == 0:
                log_debug(f"No user memories found with ids: {memory_ids}")

    except Exception as e:
        log_error(f"Error deleting user memories: {e}")
|
|
733
|
+
|
|
734
|
+
def get_all_memory_topics(self) -> List[str]:
    """Get all distinct memory topics from the database.

    Returns:
        List[str]: Deduplicated list of topics across all memories.
    """
    import json

    try:
        table = self._get_table(table_type="memories")
        if table is None:
            return []

        with self.Session() as sess, sess.begin():
            # Fetch the raw topics column and flatten it in Python.
            # Fix: the previous implementation called func.json_array_elements_text,
            # which is a PostgreSQL function that does not exist in SQLite — the
            # query always failed and this method silently returned [].
            rows = sess.execute(select(table.c.topics)).fetchall()

        topics: set = set()
        for (raw_topics,) in rows:
            if not raw_topics:
                continue
            # Topics may come back as a JSON string or an already-parsed list,
            # depending on how SQLAlchemy typed the column.
            parsed = json.loads(raw_topics) if isinstance(raw_topics, str) else raw_topics
            topics.update(parsed)
        return list(topics)

    except Exception as e:
        log_debug(f"Exception reading from memory table: {e}")
        return []
|
|
754
|
+
|
|
755
|
+
def get_user_memory(
    self, memory_id: str, deserialize: Optional[bool] = True
) -> Optional[Union[UserMemory, Dict[str, Any]]]:
    """Get a memory from the database.

    Args:
        memory_id (str): The ID of the memory to get.
        deserialize (Optional[bool]): Whether to deserialize the memory. Defaults to True.

    Returns:
        Optional[Union[UserMemory, Dict[str, Any]]]:
            - When deserialize=True: UserMemory object
            - When deserialize=False: Memory dictionary

    Raises:
        Exception: If an error occurs during retrieval.
    """
    try:
        table = self._get_table(table_type="memories")
        if table is None:
            return None

        with self.Session() as sess, sess.begin():
            stmt = select(table).where(table.c.memory_id == memory_id)
            result = sess.execute(stmt).fetchone()
            if result is None:
                return None

            memory_raw = dict(result._mapping)
            if not memory_raw or not deserialize:
                return memory_raw

            return UserMemory.from_dict(memory_raw)

    except Exception as e:
        # Fix: previous message read "memorytable" (missing space).
        log_debug(f"Exception reading from memory table: {e}")
        return None
|
|
792
|
+
|
|
793
|
+
def get_user_memories(
    self,
    user_id: Optional[str] = None,
    agent_id: Optional[str] = None,
    team_id: Optional[str] = None,
    topics: Optional[List[str]] = None,
    search_content: Optional[str] = None,
    limit: Optional[int] = None,
    page: Optional[int] = None,
    sort_by: Optional[str] = None,
    sort_order: Optional[str] = None,
    deserialize: Optional[bool] = True,
) -> Union[List[UserMemory], Tuple[List[Dict[str, Any]], int]]:
    """Get all memories from the database as UserMemory objects.

    Args:
        user_id (Optional[str]): The ID of the user to filter by.
        agent_id (Optional[str]): The ID of the agent to filter by.
        team_id (Optional[str]): The ID of the team to filter by.
        topics (Optional[List[str]]): The topics to filter by (all must match).
        search_content (Optional[str]): The content to search for.
        limit (Optional[int]): The maximum number of memories to return.
        page (Optional[int]): The page number.
        sort_by (Optional[str]): The column to sort by.
        sort_order (Optional[str]): The order to sort by.
        deserialize (Optional[bool]): Whether to deserialize the memories. Defaults to True.

    Returns:
        Union[List[UserMemory], Tuple[List[Dict[str, Any]], int]]:
            - When deserialize=True: List of UserMemory objects
            - When deserialize=False: List of memory dictionaries and total count

    Raises:
        Exception: If an error occurs during retrieval.
    """
    try:
        table = self._get_table(table_type="memories")
        if table is None:
            return [] if deserialize else ([], 0)

        with self.Session() as sess, sess.begin():
            stmt = select(table)

            # Filtering
            if user_id is not None:
                stmt = stmt.where(table.c.user_id == user_id)
            if agent_id is not None:
                stmt = stmt.where(table.c.agent_id == agent_id)
            if team_id is not None:
                stmt = stmt.where(table.c.team_id == team_id)
            if topics is not None:
                # Match each topic as a quoted JSON string inside the serialized
                # topics list. Fix: the previous implementation interpolated the
                # topic into raw SQL via text() (SQL-injectable) and used the
                # PostgreSQL-only "::text" cast; LIKE with a bound parameter is
                # both safe and valid in SQLite.
                topic_conditions = [table.c.topics.like(f'%"{topic}"%') for topic in topics]
                stmt = stmt.where(and_(*topic_conditions))
            if search_content is not None:
                stmt = stmt.where(table.c.memory.ilike(f"%{search_content}%"))

            # Get total count after applying filtering
            count_stmt = select(func.count()).select_from(stmt.alias())
            total_count = sess.execute(count_stmt).scalar()

            # Sorting
            stmt = apply_sorting(stmt, table, sort_by, sort_order)
            # Paginating
            if limit is not None:
                stmt = stmt.limit(limit)
            if page is not None:
                stmt = stmt.offset((page - 1) * limit)

            result = sess.execute(stmt).fetchall()
            if not result:
                return [] if deserialize else ([], 0)

            memories_raw = [record._mapping for record in result]

            if not deserialize:
                return memories_raw, total_count

            return [UserMemory.from_dict(record) for record in memories_raw]

    except Exception as e:
        log_error(f"Error reading from memory table: {e}")
        # Fix: honor the documented return shape when deserialize=False.
        return [] if deserialize else ([], 0)
|
|
876
|
+
|
|
877
|
+
def get_user_memory_stats(
    self,
    limit: Optional[int] = None,
    page: Optional[int] = None,
) -> Tuple[List[Dict[str, Any]], int]:
    """Get user memories stats.

    Args:
        limit (Optional[int]): The maximum number of user stats to return.
        page (Optional[int]): The page number.

    Returns:
        Tuple[List[Dict[str, Any]], int]: A list of dictionaries containing user
            stats (user_id, total_memories, last_memory_updated_at) and the
            total number of users with memories.

    Example:
        (
            [
                {
                    "user_id": "123",
                    "total_memories": 10,
                    "last_memory_updated_at": 1714560000,
                },
            ],
            total_count: 1,
        )
    """
    try:
        memories_table = self._get_table(table_type="memories")
        if memories_table is None:
            return [], 0

        with self.Session() as sess, sess.begin():
            # Aggregate memory counts per user, most recently updated first.
            stats_stmt = (
                select(
                    memories_table.c.user_id,
                    func.count(memories_table.c.memory_id).label("total_memories"),
                    func.max(memories_table.c.updated_at).label("last_memory_updated_at"),
                )
                .where(memories_table.c.user_id.is_not(None))
                .group_by(memories_table.c.user_id)
                .order_by(func.max(memories_table.c.updated_at).desc())
            )

            # Total number of distinct users before pagination.
            total_users = sess.execute(
                select(func.count()).select_from(stats_stmt.alias())
            ).scalar()

            # Pagination
            if limit is not None:
                stats_stmt = stats_stmt.limit(limit)
            if page is not None:
                stats_stmt = stats_stmt.offset((page - 1) * limit)

            rows = sess.execute(stats_stmt).fetchall()
            if not rows:
                return [], 0

            stats = [
                {
                    "user_id": row.user_id,  # type: ignore
                    "total_memories": row.total_memories,
                    "last_memory_updated_at": row.last_memory_updated_at,
                }
                for row in rows
            ]
            return stats, total_users

    except Exception as e:
        log_error(f"Error getting user memory stats: {e}")
        return [], 0
|
|
945
|
+
|
|
946
|
+
def upsert_user_memory(
    self, memory: UserMemory, deserialize: Optional[bool] = True
) -> Optional[Union[UserMemory, Dict[str, Any]]]:
    """Upsert a user memory in the database.

    Args:
        memory (UserMemory): The user memory to upsert.
        deserialize (Optional[bool]): Whether to deserialize the memory. Defaults to True.

    Returns:
        Optional[Union[UserMemory, Dict[str, Any]]]:
            - When deserialize=True: UserMemory object
            - When deserialize=False: UserMemory dictionary

    Raises:
        Exception: If an error occurs during upsert.
    """
    try:
        memories_table = self._get_table(table_type="memories", create_table_if_not_found=True)
        if memories_table is None:
            return None

        # Make sure the memory carries an ID before it is written.
        if memory.memory_id is None:
            memory.memory_id = str(uuid4())

        with self.Session() as sess, sess.begin():
            insert_stmt = sqlite.insert(memories_table).values(
                user_id=memory.user_id,
                agent_id=memory.agent_id,
                team_id=memory.team_id,
                memory_id=memory.memory_id,
                memory=memory.memory,
                topics=memory.topics,
                input=memory.input,
                updated_at=int(time.time()),
            )
            # On conflict with an existing memory_id, refresh the mutable fields.
            upsert_stmt = insert_stmt.on_conflict_do_update(  # type: ignore
                index_elements=["memory_id"],
                set_=dict(
                    memory=memory.memory,
                    topics=memory.topics,
                    input=memory.input,
                    updated_at=int(time.time()),
                ),
            ).returning(memories_table)

            row = sess.execute(upsert_stmt).fetchone()

            if row is None:
                return None

            log_debug(f"Upserted user memory with id '{memory.memory_id}'")

            raw_memory = row._mapping
            if not raw_memory or not deserialize:
                return raw_memory

            return UserMemory.from_dict(raw_memory)

    except Exception as e:
        log_error(f"Error upserting user memory: {e}")
        return None
|
|
1009
|
+
|
|
1010
|
+
def clear_memories(self) -> None:
    """Delete all memories from the database.

    Best-effort: failures are logged as warnings and not re-raised.

    Raises:
        Exception: Exceptions raised during deletion are caught and logged.
    """
    try:
        table = self._get_table(table_type="memories")
        if table is None:
            # No memories table exists yet, so there is nothing to clear.
            return

        with self.Session() as sess, sess.begin():
            # Unfiltered delete: removes every row in the memories table.
            sess.execute(table.delete())

    except Exception as e:
        # Use the module-level log_warning, consistent with the rest of this
        # class (e.g. delete_eval_run), instead of re-importing it locally.
        log_warning(f"Exception deleting all memories: {e}")
|
|
1028
|
+
|
|
1029
|
+
# -- Metrics methods --
|
|
1030
|
+
|
|
1031
|
+
def _get_all_sessions_for_metrics_calculation(
    self, start_timestamp: Optional[int] = None, end_timestamp: Optional[int] = None
) -> List[Dict[str, Any]]:
    """
    Get all sessions of all types (agent, team, workflow) as raw dictionaries.

    Args:
        start_timestamp (Optional[int]): The start timestamp to filter by. Defaults to None.
        end_timestamp (Optional[int]): The end timestamp to filter by. Defaults to None.

    Returns:
        List[Dict[str, Any]]: List of session dictionaries with session_type field.

    Raises:
        Exception: If an error occurs during retrieval.
    """
    try:
        sessions_table = self._get_table(table_type="sessions")
        if sessions_table is None:
            return []

        # Only the columns needed for metrics calculation are selected.
        selected_columns = [
            sessions_table.c.user_id,
            sessions_table.c.session_data,
            sessions_table.c.runs,
            sessions_table.c.created_at,
            sessions_table.c.session_type,
        ]
        query = select(*selected_columns)

        # Optional inclusive time-range filters on creation time.
        if start_timestamp is not None:
            query = query.where(sessions_table.c.created_at >= start_timestamp)
        if end_timestamp is not None:
            query = query.where(sessions_table.c.created_at <= end_timestamp)

        with self.Session() as sess:
            rows = sess.execute(query).fetchall()
            return [row._mapping for row in rows]

    except Exception as e:
        log_error(f"Error reading from sessions table: {e}")
        return []
|
|
1072
|
+
|
|
1073
|
+
def _get_metrics_calculation_starting_date(self, table: Table) -> Optional[date]:
    """Get the first date for which metrics calculation is needed:

    1. If there are metrics records, return the date of the first day without a complete metrics record.
    2. If there are no metrics records, return the date of the first recorded session.
    3. If there are no metrics records and no sessions records, return None.

    Args:
        table (Table): The table to get the starting date for.

    Returns:
        Optional[date]: The starting date for which metrics calculation is needed.
    """
    with self.Session() as sess:
        # Fetch only the most recent metrics record (ordered by date, newest first).
        stmt = select(table).order_by(table.c.date.desc()).limit(1)
        result = sess.execute(stmt).fetchone()

        # 1. Return the date of the first day without a complete metrics record.
        if result is not None:
            if result.completed:
                # Latest record is complete -> resume from the following day.
                return result._mapping["date"] + timedelta(days=1)
            else:
                # Latest record is incomplete -> recalculate that same day.
                return result._mapping["date"]

        # 2. No metrics records. Return the date of the first recorded session.
        # get_sessions with deserialize=False returns raw dicts; created_at is an epoch int.
        first_session, _ = self.get_sessions(sort_by="created_at", sort_order="asc", limit=1, deserialize=False)
        first_session_date = first_session[0]["created_at"] if first_session else None  # type: ignore

        # 3. No metrics records and no sessions records. Return None.
        if not first_session_date:
            return None

        # Convert the epoch timestamp to a UTC calendar date.
        return datetime.fromtimestamp(first_session_date, tz=timezone.utc).date()
|
|
1106
|
+
|
|
1107
|
+
def calculate_metrics(self) -> Optional[list[dict]]:
    """Calculate metrics for all dates without complete metrics.

    Determines the first date needing (re)calculation, loads all sessions in
    the relevant window, aggregates per-day metrics, and bulk-upserts the
    resulting records into the metrics table.

    Returns:
        Optional[list[dict]]: The calculated metrics, or None if there is
            nothing to calculate.

    Raises:
        Exception: If an error occurs during metrics calculation.
    """
    try:
        table = self._get_table(table_type="metrics", create_table_if_not_found=True)
        if table is None:
            return None

        # First day that still needs metrics; None means no session data at all.
        starting_date = self._get_metrics_calculation_starting_date(table)
        if starting_date is None:
            log_info("No session data found. Won't calculate metrics.")
            return None

        dates_to_process = get_dates_to_calculate_metrics_for(starting_date)
        if not dates_to_process:
            log_info("Metrics already calculated for all relevant dates.")
            return None

        # Window spans from midnight UTC of the first date up to midnight UTC
        # of the day AFTER the last date (inclusive range over whole days).
        start_timestamp = int(
            datetime.combine(dates_to_process[0], datetime.min.time()).replace(tzinfo=timezone.utc).timestamp()
        )
        end_timestamp = int(
            datetime.combine(dates_to_process[-1] + timedelta(days=1), datetime.min.time())
            .replace(tzinfo=timezone.utc)
            .timestamp()
        )

        sessions = self._get_all_sessions_for_metrics_calculation(
            start_timestamp=start_timestamp, end_timestamp=end_timestamp
        )
        # Bucket sessions per ISO date string -> {session_type: [sessions]}.
        all_sessions_data = fetch_all_sessions_data(
            sessions=sessions, dates_to_process=dates_to_process, start_timestamp=start_timestamp
        )
        if not all_sessions_data:
            log_info("No new session data found. Won't calculate metrics.")
            return None

        results = []
        metrics_records = []

        for date_to_process in dates_to_process:
            date_key = date_to_process.isoformat()
            sessions_for_date = all_sessions_data.get(date_key, {})

            # Skip dates with no sessions
            if not any(len(sessions) > 0 for sessions in sessions_for_date.values()):
                continue

            metrics_record = calculate_date_metrics(date_to_process, sessions_for_date)
            metrics_records.append(metrics_record)

        # Single transaction for all per-day upserts.
        if metrics_records:
            with self.Session() as sess, sess.begin():
                results = bulk_upsert_metrics(session=sess, table=table, metrics_records=metrics_records)

        log_debug("Updated metrics calculations")

        return results

    except Exception as e:
        log_error(f"Error refreshing metrics: {e}")
        raise e
|
|
1175
|
+
|
|
1176
|
+
def get_metrics(
    self,
    starting_date: Optional[date] = None,
    ending_date: Optional[date] = None,
) -> Tuple[List[dict], Optional[int]]:
    """Get all metrics matching the given date range.

    Args:
        starting_date (Optional[date]): The starting date to filter metrics by.
        ending_date (Optional[date]): The ending date to filter metrics by.

    Returns:
        Tuple[List[dict], Optional[int]]: A tuple containing the metrics and the timestamp of the latest update.

    Raises:
        Exception: If an error occurs during retrieval.
    """
    try:
        metrics_table = self._get_table(table_type="metrics", create_table_if_not_found=True)
        if metrics_table is None:
            return [], None

        with self.Session() as sess, sess.begin():
            query = select(metrics_table)

            # Inclusive date-range filters; either bound may be omitted.
            if starting_date:
                query = query.where(metrics_table.c.date >= starting_date)
            if ending_date:
                query = query.where(metrics_table.c.date <= ending_date)

            rows = sess.execute(query).fetchall()
            if not rows:
                return [], None

            # Most recent update timestamp across the whole metrics table.
            latest_updated_at = sess.execute(select(func.max(metrics_table.c.updated_at))).scalar()

            metrics = [row._mapping for row in rows]
            return metrics, latest_updated_at

    except Exception as e:
        log_error(f"Error getting metrics: {e}")
        return [], None
|
|
1217
|
+
|
|
1218
|
+
# -- Knowledge methods --
|
|
1219
|
+
|
|
1220
|
+
def delete_knowledge_content(self, id: str):
    """Delete a knowledge row from the database.

    Args:
        id (str): The ID of the knowledge row to delete.

    Raises:
        Exception: If an error occurs during deletion.
    """
    knowledge_table = self._get_table(table_type="knowledge")
    if knowledge_table is None:
        # No knowledge table -> nothing to delete.
        return

    try:
        with self.Session() as sess, sess.begin():
            delete_stmt = knowledge_table.delete().where(knowledge_table.c.id == id)
            sess.execute(delete_stmt)

    except Exception as e:
        log_error(f"Error deleting knowledge content: {e}")
|
|
1240
|
+
|
|
1241
|
+
def get_knowledge_content(self, id: str) -> Optional[KnowledgeRow]:
    """Get a knowledge row from the database.

    Args:
        id (str): The ID of the knowledge row to get.

    Returns:
        Optional[KnowledgeRow]: The knowledge row, or None if it doesn't exist.

    Raises:
        Exception: If an error occurs during retrieval.
    """
    knowledge_table = self._get_table(table_type="knowledge")
    if knowledge_table is None:
        return None

    try:
        with self.Session() as sess, sess.begin():
            query = select(knowledge_table).where(knowledge_table.c.id == id)
            row = sess.execute(query).fetchone()

            # Missing row -> no content to return.
            if row is None:
                return None

            return KnowledgeRow.model_validate(row._mapping)

    except Exception as e:
        log_error(f"Error getting knowledge content: {e}")
        return None
|
|
1269
|
+
|
|
1270
|
+
def get_knowledge_contents(
    self,
    limit: Optional[int] = None,
    page: Optional[int] = None,
    sort_by: Optional[str] = None,
    sort_order: Optional[str] = None,
) -> Tuple[List[KnowledgeRow], int]:
    """Get all knowledge contents from the database.

    Args:
        limit (Optional[int]): The maximum number of knowledge contents to return.
        page (Optional[int]): The page number. Only applied together with limit.
        sort_by (Optional[str]): The column to sort by.
        sort_order (Optional[str]): The order to sort by ("asc" or "desc").

    Returns:
        Tuple[List[KnowledgeRow], int]: The knowledge contents and total count.

    Raises:
        Exception: If an error occurs during retrieval.
    """
    table = self._get_table(table_type="knowledge")
    if table is None:
        return [], 0

    try:
        with self.Session() as sess, sess.begin():
            stmt = select(table)

            # Apply sorting. Use asc()/desc() instead of multiplying the
            # column by +/-1, which only works for numeric columns.
            if sort_by is not None:
                sort_column = getattr(table.c, sort_by)
                stmt = stmt.order_by(sort_column.asc() if sort_order == "asc" else sort_column.desc())

            # Get total count before applying limit and pagination
            count_stmt = select(func.count()).select_from(stmt.alias())
            total_count = sess.execute(count_stmt).scalar()

            # Apply pagination after count. The offset depends on limit, so
            # page is only honored when a limit is also provided.
            if limit is not None:
                stmt = stmt.limit(limit)
                if page is not None:
                    stmt = stmt.offset((page - 1) * limit)

            result = sess.execute(stmt).fetchall()
            return [KnowledgeRow.model_validate(record._mapping) for record in result], total_count

    except Exception as e:
        log_error(f"Error getting knowledge contents: {e}")
        return [], 0
|
|
1319
|
+
|
|
1320
|
+
def upsert_knowledge_content(self, knowledge_row: KnowledgeRow):
    """Upsert knowledge content in the database.

    Inserts the row if it does not exist; otherwise updates the existing row
    with the non-None fields of the given record. The original created_at
    timestamp is preserved on update.

    Args:
        knowledge_row (KnowledgeRow): The knowledge row to upsert.

    Returns:
        Optional[KnowledgeRow]: The upserted knowledge row, or None if the operation fails.
    """
    try:
        table = self._get_table(table_type="knowledge", create_table_if_not_found=True)
        if table is None:
            return None

        with self.Session() as sess, sess.begin():
            # Fields to apply when the row already exists. created_at is
            # deliberately excluded so updates don't clobber the original
            # creation timestamp (it is still set on first insert below).
            update_fields = {
                k: v
                for k, v in {
                    "name": knowledge_row.name,
                    "description": knowledge_row.description,
                    "metadata": knowledge_row.metadata,
                    "type": knowledge_row.type,
                    "size": knowledge_row.size,
                    "linked_to": knowledge_row.linked_to,
                    "access_count": knowledge_row.access_count,
                    "status": knowledge_row.status,
                    "updated_at": knowledge_row.updated_at,
                    "external_id": knowledge_row.external_id,
                }.items()
                # Filtering out None fields if updating
                if v is not None
            }

            stmt = (
                sqlite.insert(table)
                .values(knowledge_row.model_dump())
                .on_conflict_do_update(index_elements=["id"], set_=update_fields)
            )
            sess.execute(stmt)

        log_debug(f"Upserted knowledge content with id '{knowledge_row.id}'")

        return knowledge_row

    except Exception as e:
        log_error(f"Error upserting knowledge content: {e}")
        return None
|
|
1368
|
+
|
|
1369
|
+
# -- Eval methods --
|
|
1370
|
+
|
|
1371
|
+
def create_eval_run(self, eval_run: EvalRunRecord) -> Optional[EvalRunRecord]:
    """Create an EvalRunRecord in the database.

    Args:
        eval_run (EvalRunRecord): The eval run to create.

    Returns:
        Optional[EvalRunRecord]: The created eval run, or None if the operation fails.

    Raises:
        Exception: If an error occurs during creation.
    """
    try:
        table = self._get_table(table_type="evals", create_table_if_not_found=True)
        if table is None:
            return None

        with self.Session() as sess, sess.begin():
            current_time = int(time.time())
            stmt = sqlite.insert(table).values(
                {"created_at": current_time, "updated_at": current_time, **eval_run.model_dump()}
            )
            sess.execute(stmt)
            # No explicit commit needed: the sess.begin() context manager
            # commits the transaction on successful exit.

        log_debug(f"Created eval run with id '{eval_run.run_id}'")

        return eval_run

    except Exception as e:
        log_error(f"Error creating eval run: {e}")
        return None
|
|
1403
|
+
|
|
1404
|
+
def delete_eval_run(self, eval_run_id: str) -> None:
    """Delete an eval run from the database.

    Args:
        eval_run_id (str): The ID of the eval run to delete.
    """
    try:
        evals_table = self._get_table(table_type="evals")
        if evals_table is None:
            return

        with self.Session() as sess, sess.begin():
            delete_stmt = evals_table.delete().where(evals_table.c.run_id == eval_run_id)
            outcome = sess.execute(delete_stmt)
            # rowcount tells us whether a matching run actually existed.
            if outcome.rowcount > 0:
                log_debug(f"Deleted eval run with ID: {eval_run_id}")
            else:
                log_warning(f"No eval run found with ID: {eval_run_id}")

    except Exception as e:
        log_error(f"Error deleting eval run {eval_run_id}: {e}")
        raise
|
|
1426
|
+
|
|
1427
|
+
def delete_eval_runs(self, eval_run_ids: List[str]) -> None:
    """Delete multiple eval runs from the database.

    Args:
        eval_run_ids (List[str]): List of eval run IDs to delete.
    """
    try:
        evals_table = self._get_table(table_type="evals")
        if evals_table is None:
            return

        with self.Session() as sess, sess.begin():
            delete_stmt = evals_table.delete().where(evals_table.c.run_id.in_(eval_run_ids))
            outcome = sess.execute(delete_stmt)
            # rowcount reflects how many of the given IDs matched rows.
            if outcome.rowcount > 0:
                log_debug(f"Deleted {outcome.rowcount} eval runs")
            else:
                log_debug(f"No eval runs found with IDs: {eval_run_ids}")

    except Exception as e:
        log_error(f"Error deleting eval runs {eval_run_ids}: {e}")
        raise
|
|
1449
|
+
|
|
1450
|
+
def get_eval_run(
    self, eval_run_id: str, deserialize: Optional[bool] = True
) -> Optional[Union[EvalRunRecord, Dict[str, Any]]]:
    """Get an eval run from the database.

    Args:
        eval_run_id (str): The ID of the eval run to get.
        deserialize (Optional[bool]): Whether to serialize the eval run. Defaults to True.

    Returns:
        Optional[Union[EvalRunRecord, Dict[str, Any]]]:
            - When deserialize=True: EvalRunRecord object
            - When deserialize=False: EvalRun dictionary

    Raises:
        Exception: If an error occurs during retrieval.
    """
    try:
        evals_table = self._get_table(table_type="evals")
        if evals_table is None:
            return None

        with self.Session() as sess, sess.begin():
            query = select(evals_table).where(evals_table.c.run_id == eval_run_id)
            row = sess.execute(query).fetchone()
            if row is None:
                return None

            raw_record = row._mapping
            # Return the raw mapping when deserialization is not requested.
            if not raw_record or not deserialize:
                return raw_record

            return EvalRunRecord.model_validate(raw_record)

    except Exception as e:
        log_error(f"Exception getting eval run {eval_run_id}: {e}")
        return None
|
|
1487
|
+
|
|
1488
|
+
def get_eval_runs(
    self,
    limit: Optional[int] = None,
    page: Optional[int] = None,
    sort_by: Optional[str] = None,
    sort_order: Optional[str] = None,
    agent_id: Optional[str] = None,
    team_id: Optional[str] = None,
    workflow_id: Optional[str] = None,
    model_id: Optional[str] = None,
    filter_type: Optional[EvalFilterType] = None,
    eval_type: Optional[List[EvalType]] = None,
    deserialize: Optional[bool] = True,
) -> Union[List[EvalRunRecord], Tuple[List[Dict[str, Any]], int]]:
    """Get all eval runs from the database.

    Args:
        limit (Optional[int]): The maximum number of eval runs to return.
        page (Optional[int]): The page number.
        sort_by (Optional[str]): The column to sort by.
        sort_order (Optional[str]): The order to sort by.
        agent_id (Optional[str]): The ID of the agent to filter by.
        team_id (Optional[str]): The ID of the team to filter by.
        workflow_id (Optional[str]): The ID of the workflow to filter by.
        model_id (Optional[str]): The ID of the model to filter by.
        eval_type (Optional[List[EvalType]]): The type(s) of eval to filter by.
        filter_type (Optional[EvalFilterType]): Filter by component type (agent, team, workflow).
        deserialize (Optional[bool]): Whether to serialize the eval runs. Defaults to True.

    Returns:
        Union[List[EvalRunRecord], Tuple[List[Dict[str, Any]], int]]:
            - When deserialize=True: List of EvalRunRecord objects
            - When deserialize=False: List of EvalRun dictionaries and total count

    Raises:
        Exception: If an error occurs during retrieval.
    """
    try:
        table = self._get_table(table_type="evals")
        if table is None:
            return [] if deserialize else ([], 0)

        with self.Session() as sess, sess.begin():
            stmt = select(table)

            # Filtering
            if agent_id is not None:
                stmt = stmt.where(table.c.agent_id == agent_id)
            if team_id is not None:
                stmt = stmt.where(table.c.team_id == team_id)
            if workflow_id is not None:
                stmt = stmt.where(table.c.workflow_id == workflow_id)
            if model_id is not None:
                stmt = stmt.where(table.c.model_id == model_id)
            if eval_type is not None and len(eval_type) > 0:
                stmt = stmt.where(table.c.eval_type.in_(eval_type))
            if filter_type is not None:
                if filter_type == EvalFilterType.AGENT:
                    stmt = stmt.where(table.c.agent_id.is_not(None))
                elif filter_type == EvalFilterType.TEAM:
                    stmt = stmt.where(table.c.team_id.is_not(None))
                elif filter_type == EvalFilterType.WORKFLOW:
                    stmt = stmt.where(table.c.workflow_id.is_not(None))

            # Get total count after applying filtering
            count_stmt = select(func.count()).select_from(stmt.alias())
            total_count = sess.execute(count_stmt).scalar()

            # Sorting - apply default sort by created_at desc if no sort parameters provided
            if sort_by is None:
                stmt = stmt.order_by(table.c.created_at.desc())
            else:
                stmt = apply_sorting(stmt, table, sort_by, sort_order)
            # Paginating
            if limit is not None:
                stmt = stmt.limit(limit)
                if page is not None:
                    stmt = stmt.offset((page - 1) * limit)

            result = sess.execute(stmt).fetchall()
            if not result:
                return [] if deserialize else ([], 0)

            eval_runs_raw = [row._mapping for row in result]
            if not deserialize:
                return eval_runs_raw, total_count

            return [EvalRunRecord.model_validate(row) for row in eval_runs_raw]

    except Exception as e:
        log_error(f"Exception getting eval runs: {e}")
        # Keep the return shape consistent with the success paths: callers
        # using deserialize=False unpack a (rows, count) tuple.
        return [] if deserialize else ([], 0)
|
|
1581
|
+
|
|
1582
|
+
def rename_eval_run(
    self, eval_run_id: str, name: str, deserialize: Optional[bool] = True
) -> Optional[Union[EvalRunRecord, Dict[str, Any]]]:
    """Update the name of an eval run in the database.

    Args:
        eval_run_id (str): The ID of the eval run to update.
        name (str): The new name of the eval run.
        deserialize (Optional[bool]): Whether to serialize the eval run. Defaults to True.

    Returns:
        Optional[Union[EvalRunRecord, Dict[str, Any]]]:
            - When deserialize=True: EvalRunRecord object
            - When deserialize=False: EvalRun dictionary

    Raises:
        Exception: If an error occurs during update.
    """
    try:
        table = self._get_table(table_type="evals")
        if table is None:
            return None

        with self.Session() as sess, sess.begin():
            stmt = (
                table.update().where(table.c.run_id == eval_run_id).values(name=name, updated_at=int(time.time()))
            )
            sess.execute(stmt)

        # Fetch AFTER the update transaction has committed: get_eval_run opens
        # its own session, so reading inside the transaction above could see
        # the stale name (or block on database locks).
        eval_run = self.get_eval_run(eval_run_id=eval_run_id, deserialize=deserialize)

        log_debug(f"Renamed eval run with id '{eval_run_id}' to '{name}'")

        # get_eval_run already honors deserialize, so no re-validation needed.
        return eval_run

    except Exception as e:
        log_error(f"Error renaming eval run {eval_run_id}: {e}")
        raise
|
|
1623
|
+
|
|
1624
|
+
# -- Migrations --
|
|
1625
|
+
|
|
1626
|
+
def migrate_table_from_v1_to_v2(self, v1_db_schema: str, v1_table_name: str, v1_table_type: str):
|
|
1627
|
+
"""Migrate all content in the given table to the right v2 table"""
|
|
1628
|
+
|
|
1629
|
+
from agno.db.migrations.v1_to_v2 import (
|
|
1630
|
+
get_all_table_content,
|
|
1631
|
+
parse_agent_sessions,
|
|
1632
|
+
parse_memories,
|
|
1633
|
+
parse_team_sessions,
|
|
1634
|
+
parse_workflow_sessions,
|
|
1635
|
+
)
|
|
1636
|
+
|
|
1637
|
+
# Get all content from the old table
|
|
1638
|
+
old_content: list[dict[str, Any]] = get_all_table_content(
|
|
1639
|
+
db=self,
|
|
1640
|
+
db_schema=v1_db_schema,
|
|
1641
|
+
table_name=v1_table_name,
|
|
1642
|
+
)
|
|
1643
|
+
if not old_content:
|
|
1644
|
+
log_info(f"No content to migrate from table {v1_table_name}")
|
|
1645
|
+
return
|
|
1646
|
+
|
|
1647
|
+
# Parse the content into the new format
|
|
1648
|
+
memories: List[UserMemory] = []
|
|
1649
|
+
sessions: Sequence[Union[AgentSession, TeamSession, WorkflowSession]] = []
|
|
1650
|
+
if v1_table_type == "agent_sessions":
|
|
1651
|
+
sessions = parse_agent_sessions(old_content)
|
|
1652
|
+
elif v1_table_type == "team_sessions":
|
|
1653
|
+
sessions = parse_team_sessions(old_content)
|
|
1654
|
+
elif v1_table_type == "workflow_sessions":
|
|
1655
|
+
sessions = parse_workflow_sessions(old_content)
|
|
1656
|
+
elif v1_table_type == "memories":
|
|
1657
|
+
memories = parse_memories(old_content)
|
|
1658
|
+
else:
|
|
1659
|
+
raise ValueError(f"Invalid table type: {v1_table_type}")
|
|
1660
|
+
|
|
1661
|
+
# Insert the new content into the new table
|
|
1662
|
+
if v1_table_type == "agent_sessions":
|
|
1663
|
+
for session in sessions:
|
|
1664
|
+
self.upsert_session(session)
|
|
1665
|
+
log_info(f"Migrated {len(sessions)} Agent sessions to table: {self.session_table}")
|
|
1666
|
+
|
|
1667
|
+
elif v1_table_type == "team_sessions":
|
|
1668
|
+
for session in sessions:
|
|
1669
|
+
self.upsert_session(session)
|
|
1670
|
+
log_info(f"Migrated {len(sessions)} Team sessions to table: {self.session_table}")
|
|
1671
|
+
|
|
1672
|
+
elif v1_table_type == "workflow_sessions":
|
|
1673
|
+
for session in sessions:
|
|
1674
|
+
self.upsert_session(session)
|
|
1675
|
+
log_info(f"Migrated {len(sessions)} Workflow sessions to table: {self.session_table}")
|
|
1676
|
+
|
|
1677
|
+
elif v1_table_type == "memories":
|
|
1678
|
+
for memory in memories:
|
|
1679
|
+
self.upsert_user_memory(memory)
|
|
1680
|
+
log_info(f"Migrated {len(memories)} memories to table: {self.memory_table}")
|