agno 2.2.13__py3-none-any.whl → 2.4.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/__init__.py +6 -0
- agno/agent/agent.py +5252 -3145
- agno/agent/remote.py +525 -0
- agno/api/api.py +2 -0
- agno/client/__init__.py +3 -0
- agno/client/a2a/__init__.py +10 -0
- agno/client/a2a/client.py +554 -0
- agno/client/a2a/schemas.py +112 -0
- agno/client/a2a/utils.py +369 -0
- agno/client/os.py +2669 -0
- agno/compression/__init__.py +3 -0
- agno/compression/manager.py +247 -0
- agno/culture/manager.py +2 -2
- agno/db/base.py +927 -6
- agno/db/dynamo/dynamo.py +788 -2
- agno/db/dynamo/schemas.py +128 -0
- agno/db/dynamo/utils.py +26 -3
- agno/db/firestore/firestore.py +674 -50
- agno/db/firestore/schemas.py +41 -0
- agno/db/firestore/utils.py +25 -10
- agno/db/gcs_json/gcs_json_db.py +506 -3
- agno/db/gcs_json/utils.py +14 -2
- agno/db/in_memory/in_memory_db.py +203 -4
- agno/db/in_memory/utils.py +14 -2
- agno/db/json/json_db.py +498 -2
- agno/db/json/utils.py +14 -2
- agno/db/migrations/manager.py +199 -0
- agno/db/migrations/utils.py +19 -0
- agno/db/migrations/v1_to_v2.py +54 -16
- agno/db/migrations/versions/__init__.py +0 -0
- agno/db/migrations/versions/v2_3_0.py +977 -0
- agno/db/mongo/async_mongo.py +1013 -39
- agno/db/mongo/mongo.py +684 -4
- agno/db/mongo/schemas.py +48 -0
- agno/db/mongo/utils.py +17 -0
- agno/db/mysql/__init__.py +2 -1
- agno/db/mysql/async_mysql.py +2958 -0
- agno/db/mysql/mysql.py +722 -53
- agno/db/mysql/schemas.py +77 -11
- agno/db/mysql/utils.py +151 -8
- agno/db/postgres/async_postgres.py +1254 -137
- agno/db/postgres/postgres.py +2316 -93
- agno/db/postgres/schemas.py +153 -21
- agno/db/postgres/utils.py +22 -7
- agno/db/redis/redis.py +531 -3
- agno/db/redis/schemas.py +36 -0
- agno/db/redis/utils.py +31 -15
- agno/db/schemas/evals.py +1 -0
- agno/db/schemas/memory.py +20 -9
- agno/db/singlestore/schemas.py +70 -1
- agno/db/singlestore/singlestore.py +737 -74
- agno/db/singlestore/utils.py +13 -3
- agno/db/sqlite/async_sqlite.py +1069 -89
- agno/db/sqlite/schemas.py +133 -1
- agno/db/sqlite/sqlite.py +2203 -165
- agno/db/sqlite/utils.py +21 -11
- agno/db/surrealdb/models.py +25 -0
- agno/db/surrealdb/surrealdb.py +603 -1
- agno/db/utils.py +60 -0
- agno/eval/__init__.py +26 -3
- agno/eval/accuracy.py +25 -12
- agno/eval/agent_as_judge.py +871 -0
- agno/eval/base.py +29 -0
- agno/eval/performance.py +10 -4
- agno/eval/reliability.py +22 -13
- agno/eval/utils.py +2 -1
- agno/exceptions.py +42 -0
- agno/hooks/__init__.py +3 -0
- agno/hooks/decorator.py +164 -0
- agno/integrations/discord/client.py +13 -2
- agno/knowledge/__init__.py +4 -0
- agno/knowledge/chunking/code.py +90 -0
- agno/knowledge/chunking/document.py +65 -4
- agno/knowledge/chunking/fixed.py +4 -1
- agno/knowledge/chunking/markdown.py +102 -11
- agno/knowledge/chunking/recursive.py +2 -2
- agno/knowledge/chunking/semantic.py +130 -48
- agno/knowledge/chunking/strategy.py +18 -0
- agno/knowledge/embedder/azure_openai.py +0 -1
- agno/knowledge/embedder/google.py +1 -1
- agno/knowledge/embedder/mistral.py +1 -1
- agno/knowledge/embedder/nebius.py +1 -1
- agno/knowledge/embedder/openai.py +16 -12
- agno/knowledge/filesystem.py +412 -0
- agno/knowledge/knowledge.py +4261 -1199
- agno/knowledge/protocol.py +134 -0
- agno/knowledge/reader/arxiv_reader.py +3 -2
- agno/knowledge/reader/base.py +9 -7
- agno/knowledge/reader/csv_reader.py +91 -42
- agno/knowledge/reader/docx_reader.py +9 -10
- agno/knowledge/reader/excel_reader.py +225 -0
- agno/knowledge/reader/field_labeled_csv_reader.py +38 -48
- agno/knowledge/reader/firecrawl_reader.py +3 -2
- agno/knowledge/reader/json_reader.py +16 -22
- agno/knowledge/reader/markdown_reader.py +15 -14
- agno/knowledge/reader/pdf_reader.py +33 -28
- agno/knowledge/reader/pptx_reader.py +9 -10
- agno/knowledge/reader/reader_factory.py +135 -1
- agno/knowledge/reader/s3_reader.py +8 -16
- agno/knowledge/reader/tavily_reader.py +3 -3
- agno/knowledge/reader/text_reader.py +15 -14
- agno/knowledge/reader/utils/__init__.py +17 -0
- agno/knowledge/reader/utils/spreadsheet.py +114 -0
- agno/knowledge/reader/web_search_reader.py +8 -65
- agno/knowledge/reader/website_reader.py +16 -13
- agno/knowledge/reader/wikipedia_reader.py +36 -3
- agno/knowledge/reader/youtube_reader.py +3 -2
- agno/knowledge/remote_content/__init__.py +33 -0
- agno/knowledge/remote_content/config.py +266 -0
- agno/knowledge/remote_content/remote_content.py +105 -17
- agno/knowledge/utils.py +76 -22
- agno/learn/__init__.py +71 -0
- agno/learn/config.py +463 -0
- agno/learn/curate.py +185 -0
- agno/learn/machine.py +725 -0
- agno/learn/schemas.py +1114 -0
- agno/learn/stores/__init__.py +38 -0
- agno/learn/stores/decision_log.py +1156 -0
- agno/learn/stores/entity_memory.py +3275 -0
- agno/learn/stores/learned_knowledge.py +1583 -0
- agno/learn/stores/protocol.py +117 -0
- agno/learn/stores/session_context.py +1217 -0
- agno/learn/stores/user_memory.py +1495 -0
- agno/learn/stores/user_profile.py +1220 -0
- agno/learn/utils.py +209 -0
- agno/media.py +22 -6
- agno/memory/__init__.py +14 -1
- agno/memory/manager.py +223 -8
- agno/memory/strategies/__init__.py +15 -0
- agno/memory/strategies/base.py +66 -0
- agno/memory/strategies/summarize.py +196 -0
- agno/memory/strategies/types.py +37 -0
- agno/models/aimlapi/aimlapi.py +17 -0
- agno/models/anthropic/claude.py +434 -59
- agno/models/aws/bedrock.py +121 -20
- agno/models/aws/claude.py +131 -274
- agno/models/azure/ai_foundry.py +10 -6
- agno/models/azure/openai_chat.py +33 -10
- agno/models/base.py +1162 -561
- agno/models/cerebras/cerebras.py +120 -24
- agno/models/cerebras/cerebras_openai.py +21 -2
- agno/models/cohere/chat.py +65 -6
- agno/models/cometapi/cometapi.py +18 -1
- agno/models/dashscope/dashscope.py +2 -3
- agno/models/deepinfra/deepinfra.py +18 -1
- agno/models/deepseek/deepseek.py +69 -3
- agno/models/fireworks/fireworks.py +18 -1
- agno/models/google/gemini.py +959 -89
- agno/models/google/utils.py +22 -0
- agno/models/groq/groq.py +48 -18
- agno/models/huggingface/huggingface.py +17 -6
- agno/models/ibm/watsonx.py +16 -6
- agno/models/internlm/internlm.py +18 -1
- agno/models/langdb/langdb.py +13 -1
- agno/models/litellm/chat.py +88 -9
- agno/models/litellm/litellm_openai.py +18 -1
- agno/models/message.py +24 -5
- agno/models/meta/llama.py +40 -13
- agno/models/meta/llama_openai.py +22 -21
- agno/models/metrics.py +12 -0
- agno/models/mistral/mistral.py +8 -4
- agno/models/n1n/__init__.py +3 -0
- agno/models/n1n/n1n.py +57 -0
- agno/models/nebius/nebius.py +6 -7
- agno/models/nvidia/nvidia.py +20 -3
- agno/models/ollama/__init__.py +2 -0
- agno/models/ollama/chat.py +17 -6
- agno/models/ollama/responses.py +100 -0
- agno/models/openai/__init__.py +2 -0
- agno/models/openai/chat.py +117 -26
- agno/models/openai/open_responses.py +46 -0
- agno/models/openai/responses.py +110 -32
- agno/models/openrouter/__init__.py +2 -0
- agno/models/openrouter/openrouter.py +67 -2
- agno/models/openrouter/responses.py +146 -0
- agno/models/perplexity/perplexity.py +19 -1
- agno/models/portkey/portkey.py +7 -6
- agno/models/requesty/requesty.py +19 -2
- agno/models/response.py +20 -2
- agno/models/sambanova/sambanova.py +20 -3
- agno/models/siliconflow/siliconflow.py +19 -2
- agno/models/together/together.py +20 -3
- agno/models/vercel/v0.py +20 -3
- agno/models/vertexai/claude.py +124 -4
- agno/models/vllm/vllm.py +19 -14
- agno/models/xai/xai.py +19 -2
- agno/os/app.py +467 -137
- agno/os/auth.py +253 -5
- agno/os/config.py +22 -0
- agno/os/interfaces/a2a/a2a.py +7 -6
- agno/os/interfaces/a2a/router.py +635 -26
- agno/os/interfaces/a2a/utils.py +32 -33
- agno/os/interfaces/agui/agui.py +5 -3
- agno/os/interfaces/agui/router.py +26 -16
- agno/os/interfaces/agui/utils.py +97 -57
- agno/os/interfaces/base.py +7 -7
- agno/os/interfaces/slack/router.py +16 -7
- agno/os/interfaces/slack/slack.py +7 -7
- agno/os/interfaces/whatsapp/router.py +35 -7
- agno/os/interfaces/whatsapp/security.py +3 -1
- agno/os/interfaces/whatsapp/whatsapp.py +11 -8
- agno/os/managers.py +326 -0
- agno/os/mcp.py +652 -79
- agno/os/middleware/__init__.py +4 -0
- agno/os/middleware/jwt.py +718 -115
- agno/os/middleware/trailing_slash.py +27 -0
- agno/os/router.py +105 -1558
- agno/os/routers/agents/__init__.py +3 -0
- agno/os/routers/agents/router.py +655 -0
- agno/os/routers/agents/schema.py +288 -0
- agno/os/routers/components/__init__.py +3 -0
- agno/os/routers/components/components.py +475 -0
- agno/os/routers/database.py +155 -0
- agno/os/routers/evals/evals.py +111 -18
- agno/os/routers/evals/schemas.py +38 -5
- agno/os/routers/evals/utils.py +80 -11
- agno/os/routers/health.py +3 -3
- agno/os/routers/knowledge/knowledge.py +284 -35
- agno/os/routers/knowledge/schemas.py +14 -2
- agno/os/routers/memory/memory.py +274 -11
- agno/os/routers/memory/schemas.py +44 -3
- agno/os/routers/metrics/metrics.py +30 -15
- agno/os/routers/metrics/schemas.py +10 -6
- agno/os/routers/registry/__init__.py +3 -0
- agno/os/routers/registry/registry.py +337 -0
- agno/os/routers/session/session.py +143 -14
- agno/os/routers/teams/__init__.py +3 -0
- agno/os/routers/teams/router.py +550 -0
- agno/os/routers/teams/schema.py +280 -0
- agno/os/routers/traces/__init__.py +3 -0
- agno/os/routers/traces/schemas.py +414 -0
- agno/os/routers/traces/traces.py +549 -0
- agno/os/routers/workflows/__init__.py +3 -0
- agno/os/routers/workflows/router.py +757 -0
- agno/os/routers/workflows/schema.py +139 -0
- agno/os/schema.py +157 -584
- agno/os/scopes.py +469 -0
- agno/os/settings.py +3 -0
- agno/os/utils.py +574 -185
- agno/reasoning/anthropic.py +85 -1
- agno/reasoning/azure_ai_foundry.py +93 -1
- agno/reasoning/deepseek.py +102 -2
- agno/reasoning/default.py +6 -7
- agno/reasoning/gemini.py +87 -3
- agno/reasoning/groq.py +109 -2
- agno/reasoning/helpers.py +6 -7
- agno/reasoning/manager.py +1238 -0
- agno/reasoning/ollama.py +93 -1
- agno/reasoning/openai.py +115 -1
- agno/reasoning/vertexai.py +85 -1
- agno/registry/__init__.py +3 -0
- agno/registry/registry.py +68 -0
- agno/remote/__init__.py +3 -0
- agno/remote/base.py +581 -0
- agno/run/__init__.py +2 -4
- agno/run/agent.py +134 -19
- agno/run/base.py +49 -1
- agno/run/cancel.py +65 -52
- agno/run/cancellation_management/__init__.py +9 -0
- agno/run/cancellation_management/base.py +78 -0
- agno/run/cancellation_management/in_memory_cancellation_manager.py +100 -0
- agno/run/cancellation_management/redis_cancellation_manager.py +236 -0
- agno/run/requirement.py +181 -0
- agno/run/team.py +111 -19
- agno/run/workflow.py +2 -1
- agno/session/agent.py +57 -92
- agno/session/summary.py +1 -1
- agno/session/team.py +62 -115
- agno/session/workflow.py +353 -57
- agno/skills/__init__.py +17 -0
- agno/skills/agent_skills.py +377 -0
- agno/skills/errors.py +32 -0
- agno/skills/loaders/__init__.py +4 -0
- agno/skills/loaders/base.py +27 -0
- agno/skills/loaders/local.py +216 -0
- agno/skills/skill.py +65 -0
- agno/skills/utils.py +107 -0
- agno/skills/validator.py +277 -0
- agno/table.py +10 -0
- agno/team/__init__.py +5 -1
- agno/team/remote.py +447 -0
- agno/team/team.py +3769 -2202
- agno/tools/brandfetch.py +27 -18
- agno/tools/browserbase.py +225 -16
- agno/tools/crawl4ai.py +3 -0
- agno/tools/duckduckgo.py +25 -71
- agno/tools/exa.py +0 -21
- agno/tools/file.py +14 -13
- agno/tools/file_generation.py +12 -6
- agno/tools/firecrawl.py +15 -7
- agno/tools/function.py +94 -113
- agno/tools/google_bigquery.py +11 -2
- agno/tools/google_drive.py +4 -3
- agno/tools/knowledge.py +9 -4
- agno/tools/mcp/mcp.py +301 -18
- agno/tools/mcp/multi_mcp.py +269 -14
- agno/tools/mem0.py +11 -10
- agno/tools/memory.py +47 -46
- agno/tools/mlx_transcribe.py +10 -7
- agno/tools/models/nebius.py +5 -5
- agno/tools/models_labs.py +20 -10
- agno/tools/nano_banana.py +151 -0
- agno/tools/parallel.py +0 -7
- agno/tools/postgres.py +76 -36
- agno/tools/python.py +14 -6
- agno/tools/reasoning.py +30 -23
- agno/tools/redshift.py +406 -0
- agno/tools/shopify.py +1519 -0
- agno/tools/spotify.py +919 -0
- agno/tools/tavily.py +4 -1
- agno/tools/toolkit.py +253 -18
- agno/tools/websearch.py +93 -0
- agno/tools/website.py +1 -1
- agno/tools/wikipedia.py +1 -1
- agno/tools/workflow.py +56 -48
- agno/tools/yfinance.py +12 -11
- agno/tracing/__init__.py +12 -0
- agno/tracing/exporter.py +161 -0
- agno/tracing/schemas.py +276 -0
- agno/tracing/setup.py +112 -0
- agno/utils/agent.py +251 -10
- agno/utils/cryptography.py +22 -0
- agno/utils/dttm.py +33 -0
- agno/utils/events.py +264 -7
- agno/utils/hooks.py +111 -3
- agno/utils/http.py +161 -2
- agno/utils/mcp.py +49 -8
- agno/utils/media.py +22 -1
- agno/utils/models/ai_foundry.py +9 -2
- agno/utils/models/claude.py +20 -5
- agno/utils/models/cohere.py +9 -2
- agno/utils/models/llama.py +9 -2
- agno/utils/models/mistral.py +4 -2
- agno/utils/os.py +0 -0
- agno/utils/print_response/agent.py +99 -16
- agno/utils/print_response/team.py +223 -24
- agno/utils/print_response/workflow.py +0 -2
- agno/utils/prompts.py +8 -6
- agno/utils/remote.py +23 -0
- agno/utils/response.py +1 -13
- agno/utils/string.py +91 -2
- agno/utils/team.py +62 -12
- agno/utils/tokens.py +657 -0
- agno/vectordb/base.py +15 -2
- agno/vectordb/cassandra/cassandra.py +1 -1
- agno/vectordb/chroma/__init__.py +2 -1
- agno/vectordb/chroma/chromadb.py +468 -23
- agno/vectordb/clickhouse/clickhousedb.py +1 -1
- agno/vectordb/couchbase/couchbase.py +6 -2
- agno/vectordb/lancedb/lance_db.py +7 -38
- agno/vectordb/lightrag/lightrag.py +7 -6
- agno/vectordb/milvus/milvus.py +118 -84
- agno/vectordb/mongodb/__init__.py +2 -1
- agno/vectordb/mongodb/mongodb.py +14 -31
- agno/vectordb/pgvector/pgvector.py +120 -66
- agno/vectordb/pineconedb/pineconedb.py +2 -19
- agno/vectordb/qdrant/__init__.py +2 -1
- agno/vectordb/qdrant/qdrant.py +33 -56
- agno/vectordb/redis/__init__.py +2 -1
- agno/vectordb/redis/redisdb.py +19 -31
- agno/vectordb/singlestore/singlestore.py +17 -9
- agno/vectordb/surrealdb/surrealdb.py +2 -38
- agno/vectordb/weaviate/__init__.py +2 -1
- agno/vectordb/weaviate/weaviate.py +7 -3
- agno/workflow/__init__.py +5 -1
- agno/workflow/agent.py +2 -2
- agno/workflow/condition.py +12 -10
- agno/workflow/loop.py +28 -9
- agno/workflow/parallel.py +21 -13
- agno/workflow/remote.py +362 -0
- agno/workflow/router.py +12 -9
- agno/workflow/step.py +261 -36
- agno/workflow/steps.py +12 -8
- agno/workflow/types.py +40 -77
- agno/workflow/workflow.py +939 -213
- {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/METADATA +134 -181
- agno-2.4.3.dist-info/RECORD +677 -0
- {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/WHEEL +1 -1
- agno/tools/googlesearch.py +0 -98
- agno/tools/memori.py +0 -339
- agno-2.2.13.dist-info/RECORD +0 -575
- {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/licenses/LICENSE +0 -0
- {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/top_level.txt +0 -0
agno/db/mongo/async_mongo.py
CHANGED
|
@@ -1,15 +1,19 @@
|
|
|
1
|
+
import asyncio
|
|
1
2
|
import time
|
|
2
3
|
from datetime import date, datetime, timedelta, timezone
|
|
3
|
-
from typing import Any, Dict, List, Optional, Tuple, Union
|
|
4
|
+
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
|
|
4
5
|
from uuid import uuid4
|
|
5
6
|
|
|
7
|
+
if TYPE_CHECKING:
|
|
8
|
+
from agno.tracing.schemas import Span, Trace
|
|
9
|
+
|
|
6
10
|
from agno.db.base import AsyncBaseDb, SessionType
|
|
7
11
|
from agno.db.mongo.utils import (
|
|
8
12
|
apply_pagination,
|
|
9
13
|
apply_sorting,
|
|
10
14
|
bulk_upsert_metrics,
|
|
11
15
|
calculate_date_metrics,
|
|
12
|
-
|
|
16
|
+
create_collection_indexes_async,
|
|
13
17
|
deserialize_cultural_knowledge_from_db,
|
|
14
18
|
fetch_all_sessions_data,
|
|
15
19
|
get_dates_to_calculate_metrics_for,
|
|
@@ -25,11 +29,26 @@ from agno.utils.log import log_debug, log_error, log_info
|
|
|
25
29
|
from agno.utils.string import generate_id
|
|
26
30
|
|
|
27
31
|
try:
|
|
28
|
-
import asyncio
|
|
29
|
-
|
|
30
32
|
from motor.motor_asyncio import AsyncIOMotorClient, AsyncIOMotorCollection, AsyncIOMotorDatabase # type: ignore
|
|
33
|
+
|
|
34
|
+
MOTOR_AVAILABLE = True
|
|
35
|
+
except ImportError:
|
|
36
|
+
MOTOR_AVAILABLE = False
|
|
37
|
+
AsyncIOMotorClient = None # type: ignore
|
|
38
|
+
AsyncIOMotorCollection = None # type: ignore
|
|
39
|
+
AsyncIOMotorDatabase = None # type: ignore
|
|
40
|
+
|
|
41
|
+
try:
|
|
42
|
+
from pymongo import AsyncMongoClient # type: ignore
|
|
43
|
+
from pymongo.collection import AsyncCollection # type: ignore
|
|
44
|
+
from pymongo.database import AsyncDatabase # type: ignore
|
|
45
|
+
|
|
46
|
+
PYMONGO_ASYNC_AVAILABLE = True
|
|
31
47
|
except ImportError:
|
|
32
|
-
|
|
48
|
+
PYMONGO_ASYNC_AVAILABLE = False
|
|
49
|
+
AsyncMongoClient = None # type: ignore
|
|
50
|
+
AsyncDatabase = None # type: ignore
|
|
51
|
+
AsyncCollection = None # type: ignore
|
|
33
52
|
|
|
34
53
|
try:
|
|
35
54
|
from pymongo import ReturnDocument
|
|
@@ -37,11 +56,89 @@ try:
|
|
|
37
56
|
except ImportError:
|
|
38
57
|
raise ImportError("`pymongo` not installed. Please install it using `pip install -U pymongo`")
|
|
39
58
|
|
|
59
|
+
# Ensure at least one async library is available
|
|
60
|
+
if not MOTOR_AVAILABLE and not PYMONGO_ASYNC_AVAILABLE:
|
|
61
|
+
raise ImportError(
|
|
62
|
+
"Neither `motor` nor PyMongo async is installed. "
|
|
63
|
+
"Please install one of them using:\n"
|
|
64
|
+
" - `pip install -U 'pymongo>=4.9'` (recommended)"
|
|
65
|
+
" - `pip install -U motor` (legacy, deprecated)\n"
|
|
66
|
+
)
|
|
67
|
+
|
|
68
|
+
# Create union types for client, database, and collection
|
|
69
|
+
if TYPE_CHECKING:
|
|
70
|
+
if MOTOR_AVAILABLE and PYMONGO_ASYNC_AVAILABLE:
|
|
71
|
+
AsyncMongoClientType = Union[AsyncIOMotorClient, AsyncMongoClient] # type: ignore
|
|
72
|
+
AsyncMongoDatabaseType = Union[AsyncIOMotorDatabase, AsyncDatabase] # type: ignore
|
|
73
|
+
AsyncMongoCollectionType = Union[AsyncIOMotorCollection, AsyncCollection] # type: ignore
|
|
74
|
+
elif MOTOR_AVAILABLE:
|
|
75
|
+
AsyncMongoClientType = AsyncIOMotorClient # type: ignore
|
|
76
|
+
AsyncMongoDatabaseType = AsyncIOMotorDatabase # type: ignore
|
|
77
|
+
AsyncMongoCollectionType = AsyncIOMotorCollection # type: ignore
|
|
78
|
+
else:
|
|
79
|
+
AsyncMongoClientType = AsyncMongoClient # type: ignore
|
|
80
|
+
AsyncMongoDatabaseType = AsyncDatabase # type: ignore
|
|
81
|
+
AsyncMongoCollectionType = AsyncCollection # type: ignore
|
|
82
|
+
else:
|
|
83
|
+
# Runtime type - use Any to avoid import issues
|
|
84
|
+
AsyncMongoClientType = Any
|
|
85
|
+
AsyncMongoDatabaseType = Any
|
|
86
|
+
AsyncMongoCollectionType = Any
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
# Client type constants (defined before class to allow use in _detect_client_type)
|
|
90
|
+
_CLIENT_TYPE_MOTOR = "motor"
|
|
91
|
+
_CLIENT_TYPE_PYMONGO_ASYNC = "pymongo_async"
|
|
92
|
+
_CLIENT_TYPE_UNKNOWN = "unknown"
|
|
93
|
+
|
|
94
|
+
|
|
95
|
+
def _detect_client_type(client: Any) -> str:
|
|
96
|
+
"""Detect whether a client is Motor or PyMongo async."""
|
|
97
|
+
if client is None:
|
|
98
|
+
return _CLIENT_TYPE_UNKNOWN
|
|
99
|
+
|
|
100
|
+
# Check PyMongo async
|
|
101
|
+
if PYMONGO_ASYNC_AVAILABLE and AsyncMongoClient is not None:
|
|
102
|
+
try:
|
|
103
|
+
if isinstance(client, AsyncMongoClient):
|
|
104
|
+
return _CLIENT_TYPE_PYMONGO_ASYNC
|
|
105
|
+
except (TypeError, AttributeError):
|
|
106
|
+
pass # Fall through to next check
|
|
107
|
+
|
|
108
|
+
if MOTOR_AVAILABLE and AsyncIOMotorClient is not None:
|
|
109
|
+
try:
|
|
110
|
+
if isinstance(client, AsyncIOMotorClient):
|
|
111
|
+
return _CLIENT_TYPE_MOTOR
|
|
112
|
+
except (TypeError, AttributeError):
|
|
113
|
+
pass # Fall through to fallback
|
|
114
|
+
|
|
115
|
+
# Fallback to string matching only if isinstance fails
|
|
116
|
+
# (should rarely happen, but useful for edge cases)
|
|
117
|
+
client_type_name = type(client).__name__
|
|
118
|
+
if "Motor" in client_type_name or "AsyncIOMotor" in client_type_name:
|
|
119
|
+
return _CLIENT_TYPE_MOTOR
|
|
120
|
+
elif "AsyncMongo" in client_type_name:
|
|
121
|
+
return _CLIENT_TYPE_PYMONGO_ASYNC
|
|
122
|
+
|
|
123
|
+
# Last resort: check module name
|
|
124
|
+
module_name = type(client).__module__
|
|
125
|
+
if "motor" in module_name:
|
|
126
|
+
return _CLIENT_TYPE_MOTOR
|
|
127
|
+
elif "pymongo" in module_name:
|
|
128
|
+
return _CLIENT_TYPE_PYMONGO_ASYNC
|
|
129
|
+
|
|
130
|
+
return _CLIENT_TYPE_UNKNOWN
|
|
131
|
+
|
|
40
132
|
|
|
41
133
|
class AsyncMongoDb(AsyncBaseDb):
|
|
134
|
+
# Client type constants (class-level access to module constants)
|
|
135
|
+
CLIENT_TYPE_MOTOR = _CLIENT_TYPE_MOTOR
|
|
136
|
+
CLIENT_TYPE_PYMONGO_ASYNC = _CLIENT_TYPE_PYMONGO_ASYNC
|
|
137
|
+
CLIENT_TYPE_UNKNOWN = _CLIENT_TYPE_UNKNOWN
|
|
138
|
+
|
|
42
139
|
def __init__(
|
|
43
140
|
self,
|
|
44
|
-
db_client: Optional[AsyncIOMotorClient] = None,
|
|
141
|
+
db_client: Optional[Union["AsyncIOMotorClient", "AsyncMongoClient"]] = None,
|
|
45
142
|
db_name: Optional[str] = None,
|
|
46
143
|
db_url: Optional[str] = None,
|
|
47
144
|
session_collection: Optional[str] = None,
|
|
@@ -50,13 +147,22 @@ class AsyncMongoDb(AsyncBaseDb):
|
|
|
50
147
|
eval_collection: Optional[str] = None,
|
|
51
148
|
knowledge_collection: Optional[str] = None,
|
|
52
149
|
culture_collection: Optional[str] = None,
|
|
150
|
+
traces_collection: Optional[str] = None,
|
|
151
|
+
spans_collection: Optional[str] = None,
|
|
152
|
+
learnings_collection: Optional[str] = None,
|
|
53
153
|
id: Optional[str] = None,
|
|
54
154
|
):
|
|
55
155
|
"""
|
|
56
|
-
Async interface for interacting with a MongoDB database
|
|
156
|
+
Async interface for interacting with a MongoDB database.
|
|
157
|
+
|
|
158
|
+
Supports both Motor (legacy) and PyMongo async (recommended) clients.
|
|
159
|
+
When both libraries are available, PyMongo async is preferred.
|
|
57
160
|
|
|
58
161
|
Args:
|
|
59
|
-
db_client (Optional[AsyncIOMotorClient]):
|
|
162
|
+
db_client (Optional[Union[AsyncIOMotorClient, AsyncMongoClient]]):
|
|
163
|
+
The MongoDB async client to use. Can be either Motor's AsyncIOMotorClient
|
|
164
|
+
or PyMongo's AsyncMongoClient. If not provided, a client will be created
|
|
165
|
+
from db_url using the preferred available library.
|
|
60
166
|
db_name (Optional[str]): The name of the database to use.
|
|
61
167
|
db_url (Optional[str]): The database URL to connect to.
|
|
62
168
|
session_collection (Optional[str]): Name of the collection to store sessions.
|
|
@@ -65,10 +171,14 @@ class AsyncMongoDb(AsyncBaseDb):
|
|
|
65
171
|
eval_collection (Optional[str]): Name of the collection to store evaluation runs.
|
|
66
172
|
knowledge_collection (Optional[str]): Name of the collection to store knowledge documents.
|
|
67
173
|
culture_collection (Optional[str]): Name of the collection to store cultural knowledge.
|
|
174
|
+
traces_collection (Optional[str]): Name of the collection to store traces.
|
|
175
|
+
spans_collection (Optional[str]): Name of the collection to store spans.
|
|
176
|
+
learnings_collection (Optional[str]): Name of the collection to store learnings.
|
|
68
177
|
id (Optional[str]): ID of the database.
|
|
69
178
|
|
|
70
179
|
Raises:
|
|
71
|
-
ValueError: If neither db_url nor db_client is provided.
|
|
180
|
+
ValueError: If neither db_url nor db_client is provided, or if db_client type is unsupported.
|
|
181
|
+
ImportError: If neither motor nor pymongo async is installed.
|
|
72
182
|
"""
|
|
73
183
|
if id is None:
|
|
74
184
|
base_seed = db_url or str(db_client)
|
|
@@ -84,10 +194,26 @@ class AsyncMongoDb(AsyncBaseDb):
|
|
|
84
194
|
eval_table=eval_collection,
|
|
85
195
|
knowledge_table=knowledge_collection,
|
|
86
196
|
culture_table=culture_collection,
|
|
197
|
+
traces_table=traces_collection,
|
|
198
|
+
spans_table=spans_collection,
|
|
199
|
+
learnings_table=learnings_collection,
|
|
87
200
|
)
|
|
88
201
|
|
|
202
|
+
# Detect client type if provided
|
|
203
|
+
if db_client is not None:
|
|
204
|
+
self._client_type = _detect_client_type(db_client)
|
|
205
|
+
if self._client_type == self.CLIENT_TYPE_UNKNOWN:
|
|
206
|
+
raise ValueError(
|
|
207
|
+
f"Unsupported MongoDB client type: {type(db_client).__name__}. "
|
|
208
|
+
"Only Motor (AsyncIOMotorClient) or PyMongo async (AsyncMongoClient) are supported."
|
|
209
|
+
)
|
|
210
|
+
else:
|
|
211
|
+
# Auto-select preferred library when creating from URL
|
|
212
|
+
# Prefer PyMongo async if available, fallback to Motor
|
|
213
|
+
self._client_type = self.CLIENT_TYPE_PYMONGO_ASYNC if PYMONGO_ASYNC_AVAILABLE else self.CLIENT_TYPE_MOTOR
|
|
214
|
+
|
|
89
215
|
# Store configuration for lazy initialization
|
|
90
|
-
self._provided_client: Optional[
|
|
216
|
+
self._provided_client: Optional[AsyncMongoClientType] = db_client
|
|
91
217
|
self.db_url: Optional[str] = db_url
|
|
92
218
|
self.db_name: str = db_name if db_name is not None else "agno"
|
|
93
219
|
|
|
@@ -95,8 +221,8 @@ class AsyncMongoDb(AsyncBaseDb):
|
|
|
95
221
|
raise ValueError("One of db_url or db_client must be provided")
|
|
96
222
|
|
|
97
223
|
# Client and database will be lazily initialized per event loop
|
|
98
|
-
self._client: Optional[
|
|
99
|
-
self._database: Optional[
|
|
224
|
+
self._client: Optional[AsyncMongoClientType] = None
|
|
225
|
+
self._database: Optional[AsyncMongoDatabaseType] = None
|
|
100
226
|
self._event_loop: Optional[asyncio.AbstractEventLoop] = None
|
|
101
227
|
|
|
102
228
|
async def table_exists(self, table_name: str) -> bool:
|
|
@@ -126,15 +252,27 @@ class AsyncMongoDb(AsyncBaseDb):
|
|
|
126
252
|
if collection_name and not await self.table_exists(collection_name):
|
|
127
253
|
await self._get_collection(collection_type, create_collection_if_not_found=True)
|
|
128
254
|
|
|
129
|
-
def
|
|
255
|
+
async def close(self) -> None:
|
|
256
|
+
"""Close the MongoDB client connection.
|
|
257
|
+
|
|
258
|
+
Should be called during application shutdown to properly release
|
|
259
|
+
all database connections.
|
|
130
260
|
"""
|
|
131
|
-
|
|
261
|
+
if self._client is not None:
|
|
262
|
+
self._client.close()
|
|
263
|
+
self._client = None
|
|
264
|
+
self._database = None
|
|
132
265
|
|
|
133
|
-
|
|
134
|
-
|
|
266
|
+
def _ensure_client(self) -> AsyncMongoClientType:
|
|
267
|
+
"""
|
|
268
|
+
Ensure the MongoDB async client is valid for the current event loop.
|
|
269
|
+
|
|
270
|
+
Both Motor's AsyncIOMotorClient and PyMongo's AsyncMongoClient are tied to
|
|
271
|
+
the event loop they were created in. If we detect a new event loop, we need
|
|
272
|
+
to refresh the client.
|
|
135
273
|
|
|
136
274
|
Returns:
|
|
137
|
-
AsyncIOMotorClient: A valid client for the current event loop.
|
|
275
|
+
Union[AsyncIOMotorClient, AsyncMongoClient]: A valid client for the current event loop.
|
|
138
276
|
"""
|
|
139
277
|
try:
|
|
140
278
|
current_loop = asyncio.get_running_loop()
|
|
@@ -144,8 +282,13 @@ class AsyncMongoDb(AsyncBaseDb):
|
|
|
144
282
|
if self._provided_client is not None:
|
|
145
283
|
self._client = self._provided_client
|
|
146
284
|
elif self.db_url is not None:
|
|
147
|
-
|
|
148
|
-
|
|
285
|
+
# Create client based on detected type
|
|
286
|
+
if self._client_type == self.CLIENT_TYPE_PYMONGO_ASYNC and PYMONGO_ASYNC_AVAILABLE:
|
|
287
|
+
self._client = AsyncMongoClient(self.db_url) # type: ignore
|
|
288
|
+
elif self._client_type == self.CLIENT_TYPE_MOTOR and MOTOR_AVAILABLE:
|
|
289
|
+
self._client = AsyncIOMotorClient(self.db_url) # type: ignore
|
|
290
|
+
else:
|
|
291
|
+
raise RuntimeError(f"Client type '{self._client_type}' not available")
|
|
149
292
|
return self._client # type: ignore
|
|
150
293
|
|
|
151
294
|
# Check if we're in a different event loop
|
|
@@ -153,43 +296,47 @@ class AsyncMongoDb(AsyncBaseDb):
|
|
|
153
296
|
# New event loop detected, create new client
|
|
154
297
|
if self._provided_client is not None:
|
|
155
298
|
# User provided a client, use it but warn them
|
|
299
|
+
client_type_name = (
|
|
300
|
+
"AsyncMongoClient" if self._client_type == self.CLIENT_TYPE_PYMONGO_ASYNC else "AsyncIOMotorClient"
|
|
301
|
+
)
|
|
156
302
|
log_debug(
|
|
157
|
-
"New event loop detected. Using provided
|
|
303
|
+
f"New event loop detected. Using provided {client_type_name}, "
|
|
158
304
|
"which may cause issues if it was created in a different event loop."
|
|
159
305
|
)
|
|
160
306
|
self._client = self._provided_client
|
|
161
307
|
elif self.db_url is not None:
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
308
|
+
if self._client_type == self.CLIENT_TYPE_PYMONGO_ASYNC and PYMONGO_ASYNC_AVAILABLE:
|
|
309
|
+
self._client = AsyncMongoClient(self.db_url) # type: ignore
|
|
310
|
+
elif self._client_type == self.CLIENT_TYPE_MOTOR and MOTOR_AVAILABLE:
|
|
311
|
+
self._client = AsyncIOMotorClient(self.db_url) # type: ignore
|
|
312
|
+
else:
|
|
313
|
+
raise RuntimeError(f"Client type '{self._client_type}' not available")
|
|
167
314
|
|
|
168
315
|
self._event_loop = current_loop
|
|
169
316
|
self._database = None # Reset database reference
|
|
170
|
-
# Clear collection caches when switching event loops
|
|
317
|
+
# Clear collection caches and initialization flags when switching event loops
|
|
171
318
|
for attr in list(vars(self).keys()):
|
|
172
|
-
if attr.endswith("_collection"):
|
|
319
|
+
if attr.endswith("_collection") or attr.endswith("_initialized"):
|
|
173
320
|
delattr(self, attr)
|
|
174
321
|
|
|
175
322
|
return self._client # type: ignore
|
|
176
323
|
|
|
177
324
|
@property
|
|
178
|
-
def db_client(self) ->
|
|
325
|
+
def db_client(self) -> AsyncMongoClientType:
|
|
179
326
|
"""Get the MongoDB client, ensuring it's valid for the current event loop."""
|
|
180
327
|
return self._ensure_client()
|
|
181
328
|
|
|
182
329
|
@property
|
|
183
|
-
def database(self) ->
|
|
330
|
+
def database(self) -> AsyncMongoDatabaseType:
|
|
184
331
|
"""Get the MongoDB database, ensuring it's valid for the current event loop."""
|
|
185
332
|
try:
|
|
186
333
|
current_loop = asyncio.get_running_loop()
|
|
187
334
|
if self._database is None or self._event_loop != current_loop:
|
|
188
|
-
self._database = self.db_client[self.db_name]
|
|
335
|
+
self._database = self.db_client[self.db_name] # type: ignore
|
|
189
336
|
except RuntimeError:
|
|
190
337
|
# No running loop - fallback to existing database or create new one
|
|
191
338
|
if self._database is None:
|
|
192
|
-
self._database = self.db_client[self.db_name]
|
|
339
|
+
self._database = self.db_client[self.db_name] # type: ignore
|
|
193
340
|
return self._database
|
|
194
341
|
|
|
195
342
|
# -- DB methods --
|
|
@@ -204,7 +351,7 @@ class AsyncMongoDb(AsyncBaseDb):
|
|
|
204
351
|
|
|
205
352
|
async def _get_collection(
|
|
206
353
|
self, table_type: str, create_collection_if_not_found: Optional[bool] = True
|
|
207
|
-
) -> Optional[
|
|
354
|
+
) -> Optional[AsyncMongoCollectionType]:
|
|
208
355
|
"""Get or create a collection based on table type.
|
|
209
356
|
|
|
210
357
|
Args:
|
|
@@ -212,7 +359,7 @@ class AsyncMongoDb(AsyncBaseDb):
|
|
|
212
359
|
create_collection_if_not_found (Optional[bool]): Whether to create the collection if it doesn't exist.
|
|
213
360
|
|
|
214
361
|
Returns:
|
|
215
|
-
AsyncIOMotorCollection: The collection object.
|
|
362
|
+
Union[AsyncIOMotorCollection, AsyncCollection]: The collection object.
|
|
216
363
|
"""
|
|
217
364
|
# Ensure client is valid for current event loop before accessing collections
|
|
218
365
|
_ = self.db_client # This triggers _ensure_client()
|
|
@@ -286,11 +433,44 @@ class AsyncMongoDb(AsyncBaseDb):
|
|
|
286
433
|
)
|
|
287
434
|
return self.culture_collection
|
|
288
435
|
|
|
436
|
+
if table_type == "traces":
|
|
437
|
+
if reset_cache or not hasattr(self, "traces_collection"):
|
|
438
|
+
if self.trace_table_name is None:
|
|
439
|
+
raise ValueError("Traces collection was not provided on initialization")
|
|
440
|
+
self.traces_collection = await self._get_or_create_collection(
|
|
441
|
+
collection_name=self.trace_table_name,
|
|
442
|
+
collection_type="traces",
|
|
443
|
+
create_collection_if_not_found=create_collection_if_not_found,
|
|
444
|
+
)
|
|
445
|
+
return self.traces_collection
|
|
446
|
+
|
|
447
|
+
if table_type == "spans":
|
|
448
|
+
if reset_cache or not hasattr(self, "spans_collection"):
|
|
449
|
+
if self.span_table_name is None:
|
|
450
|
+
raise ValueError("Spans collection was not provided on initialization")
|
|
451
|
+
self.spans_collection = await self._get_or_create_collection(
|
|
452
|
+
collection_name=self.span_table_name,
|
|
453
|
+
collection_type="spans",
|
|
454
|
+
create_collection_if_not_found=create_collection_if_not_found,
|
|
455
|
+
)
|
|
456
|
+
return self.spans_collection
|
|
457
|
+
|
|
458
|
+
if table_type == "learnings":
|
|
459
|
+
if reset_cache or not hasattr(self, "learnings_collection"):
|
|
460
|
+
if self.learnings_table_name is None:
|
|
461
|
+
raise ValueError("Learnings collection was not provided on initialization")
|
|
462
|
+
self.learnings_collection = await self._get_or_create_collection(
|
|
463
|
+
collection_name=self.learnings_table_name,
|
|
464
|
+
collection_type="learnings",
|
|
465
|
+
create_collection_if_not_found=create_collection_if_not_found,
|
|
466
|
+
)
|
|
467
|
+
return self.learnings_collection
|
|
468
|
+
|
|
289
469
|
raise ValueError(f"Unknown table type: {table_type}")
|
|
290
470
|
|
|
291
471
|
async def _get_or_create_collection(
|
|
292
472
|
self, collection_name: str, collection_type: str, create_collection_if_not_found: Optional[bool] = True
|
|
293
|
-
) -> Optional[
|
|
473
|
+
) -> Optional[AsyncMongoCollectionType]:
|
|
294
474
|
"""Get or create a collection with proper indexes.
|
|
295
475
|
|
|
296
476
|
Args:
|
|
@@ -299,7 +479,7 @@ class AsyncMongoDb(AsyncBaseDb):
|
|
|
299
479
|
create_collection_if_not_found (Optional[bool]): Whether to create the collection if it doesn't exist.
|
|
300
480
|
|
|
301
481
|
Returns:
|
|
302
|
-
|
|
482
|
+
Union[AsyncIOMotorCollection, AsyncCollection]: The collection object.
|
|
303
483
|
"""
|
|
304
484
|
try:
|
|
305
485
|
collection = self.database[collection_name]
|
|
@@ -307,9 +487,8 @@ class AsyncMongoDb(AsyncBaseDb):
|
|
|
307
487
|
if not hasattr(self, f"_{collection_name}_initialized"):
|
|
308
488
|
if not create_collection_if_not_found:
|
|
309
489
|
return None
|
|
310
|
-
#
|
|
311
|
-
|
|
312
|
-
create_collection_indexes(collection, collection_type) # type: ignore
|
|
490
|
+
# Create indexes asynchronously for async MongoDB collections
|
|
491
|
+
await create_collection_indexes_async(collection, collection_type)
|
|
313
492
|
setattr(self, f"_{collection_name}_initialized", True)
|
|
314
493
|
log_debug(f"Initialized collection '{collection_name}'")
|
|
315
494
|
else:
|
|
@@ -321,6 +500,14 @@ class AsyncMongoDb(AsyncBaseDb):
|
|
|
321
500
|
log_error(f"Error getting collection {collection_name}: {e}")
|
|
322
501
|
raise
|
|
323
502
|
|
|
503
|
+
    def get_latest_schema_version(self):
        """Get the latest version of the database schema.

        NOTE(review): intentionally a no-op for this MongoDB backend (returns
        None). Presumably schema versioning does not apply to the document
        store — confirm against the AsyncBaseDb contract.
        """
        pass
+
|
|
507
|
+
    def upsert_schema_version(self, version: str) -> None:
        """Upsert the schema version into the database.

        NOTE(review): intentionally a no-op for this MongoDB backend — confirm
        against the AsyncBaseDb contract.
        """
        pass
|
510
|
+
|
|
324
511
|
# -- Session methods --
|
|
325
512
|
|
|
326
513
|
async def delete_session(self, session_id: str) -> bool:
|
|
@@ -1241,6 +1428,9 @@ class AsyncMongoDb(AsyncBaseDb):
|
|
|
1241
1428
|
"memory_id": memory.memory_id,
|
|
1242
1429
|
"memory": memory.memory,
|
|
1243
1430
|
"topics": memory.topics,
|
|
1431
|
+
"input": memory.input,
|
|
1432
|
+
"feedback": memory.feedback,
|
|
1433
|
+
"created_at": memory.created_at,
|
|
1244
1434
|
"updated_at": updated_at,
|
|
1245
1435
|
}
|
|
1246
1436
|
|
|
@@ -1533,7 +1723,7 @@ class AsyncMongoDb(AsyncBaseDb):
|
|
|
1533
1723
|
log_error(f"Exception reading from sessions collection: {e}")
|
|
1534
1724
|
return []
|
|
1535
1725
|
|
|
1536
|
-
async def _get_metrics_calculation_starting_date(self, collection:
|
|
1726
|
+
async def _get_metrics_calculation_starting_date(self, collection: AsyncMongoCollectionType) -> Optional[date]:
|
|
1537
1727
|
"""Get the first date for which metrics calculation is needed."""
|
|
1538
1728
|
try:
|
|
1539
1729
|
result = await collection.find_one({}, sort=[("date", -1)], limit=1)
|
|
@@ -2024,3 +2214,787 @@ class AsyncMongoDb(AsyncBaseDb):
|
|
|
2024
2214
|
except Exception as e:
|
|
2025
2215
|
log_error(f"Error updating eval run name {eval_run_id}: {e}")
|
|
2026
2216
|
raise e
|
|
2217
|
+
|
|
2218
|
+
# --- Traces ---
|
|
2219
|
+
def _get_component_level(
|
|
2220
|
+
self, workflow_id: Optional[str], team_id: Optional[str], agent_id: Optional[str], name: str
|
|
2221
|
+
) -> int:
|
|
2222
|
+
"""Get the component level for a trace based on its context.
|
|
2223
|
+
|
|
2224
|
+
Component levels (higher = more important):
|
|
2225
|
+
- 3: Workflow root (.run or .arun with workflow_id)
|
|
2226
|
+
- 2: Team root (.run or .arun with team_id)
|
|
2227
|
+
- 1: Agent root (.run or .arun with agent_id)
|
|
2228
|
+
- 0: Child span (not a root)
|
|
2229
|
+
|
|
2230
|
+
Args:
|
|
2231
|
+
workflow_id: The workflow ID of the trace.
|
|
2232
|
+
team_id: The team ID of the trace.
|
|
2233
|
+
agent_id: The agent ID of the trace.
|
|
2234
|
+
name: The name of the trace.
|
|
2235
|
+
|
|
2236
|
+
Returns:
|
|
2237
|
+
int: The component level (0-3).
|
|
2238
|
+
"""
|
|
2239
|
+
# Check if name indicates a root span
|
|
2240
|
+
is_root_name = ".run" in name or ".arun" in name
|
|
2241
|
+
|
|
2242
|
+
if not is_root_name:
|
|
2243
|
+
return 0 # Child span (not a root)
|
|
2244
|
+
elif workflow_id:
|
|
2245
|
+
return 3 # Workflow root
|
|
2246
|
+
elif team_id:
|
|
2247
|
+
return 2 # Team root
|
|
2248
|
+
elif agent_id:
|
|
2249
|
+
return 1 # Agent root
|
|
2250
|
+
else:
|
|
2251
|
+
return 0 # Unknown
|
|
2252
|
+
|
|
2253
|
+
    async def upsert_trace(self, trace: "Trace") -> None:
        """Create or update a single trace record in the database.

        Uses MongoDB's update_one with upsert=True and aggregation pipeline
        to handle concurrent inserts atomically and avoid race conditions.

        Args:
            trace: The Trace object to store (one per trace_id).
        """
        try:
            collection = await self._get_collection(table_type="traces", create_collection_if_not_found=True)
            if collection is None:
                return

            trace_dict = trace.to_dict()
            # total_spans / error_count are derived from the spans collection at
            # read time (see get_trace), so they are never persisted here.
            trace_dict.pop("total_spans", None)
            trace_dict.pop("error_count", None)

            # Calculate the component level for the new trace
            new_level = self._get_component_level(trace.workflow_id, trace.team_id, trace.agent_id, trace.name)

            # Use MongoDB aggregation pipeline update for atomic upsert
            # This allows conditional logic within a single atomic operation
            pipeline: List[Dict[str, Any]] = [
                {
                    "$set": {
                        # Always update these fields
                        "status": trace.status,
                        "created_at": {"$ifNull": ["$created_at", trace_dict.get("created_at")]},
                        # Use $min for start_time (keep earliest)
                        "start_time": {
                            "$cond": {
                                "if": {"$eq": [{"$type": "$start_time"}, "missing"]},
                                "then": trace_dict.get("start_time"),
                                "else": {"$min": ["$start_time", trace_dict.get("start_time")]},
                            }
                        },
                        # Use $max for end_time (keep latest)
                        "end_time": {
                            "$cond": {
                                "if": {"$eq": [{"$type": "$end_time"}, "missing"]},
                                "then": trace_dict.get("end_time"),
                                "else": {"$max": ["$end_time", trace_dict.get("end_time")]},
                            }
                        },
                        # Preserve existing non-null context values using $ifNull
                        "run_id": {"$ifNull": [trace.run_id, "$run_id"]},
                        "session_id": {"$ifNull": [trace.session_id, "$session_id"]},
                        "user_id": {"$ifNull": [trace.user_id, "$user_id"]},
                        "agent_id": {"$ifNull": [trace.agent_id, "$agent_id"]},
                        "team_id": {"$ifNull": [trace.team_id, "$team_id"]},
                        "workflow_id": {"$ifNull": [trace.workflow_id, "$workflow_id"]},
                    }
                },
                {
                    "$set": {
                        # Calculate duration_ms from the (potentially updated) start_time and end_time
                        # MongoDB stores dates as strings in ISO format, so we need to parse them
                        "duration_ms": {
                            "$cond": {
                                "if": {
                                    "$and": [
                                        {"$ne": [{"$type": "$start_time"}, "missing"]},
                                        {"$ne": [{"$type": "$end_time"}, "missing"]},
                                    ]
                                },
                                "then": {
                                    "$subtract": [
                                        {"$toLong": {"$toDate": "$end_time"}},
                                        {"$toLong": {"$toDate": "$start_time"}},
                                    ]
                                },
                                "else": trace_dict.get("duration_ms", 0),
                            }
                        },
                        # Update name based on component level priority
                        # Only update if new trace is from a higher-level component
                        "name": {
                            "$cond": {
                                "if": {"$eq": [{"$type": "$name"}, "missing"]},
                                "then": trace.name,
                                "else": {
                                    "$cond": {
                                        # Compare new_level (computed in Python) against the
                                        # existing document's level, recomputed server-side.
                                        "if": {
                                            "$gt": [
                                                new_level,
                                                {
                                                    "$switch": {
                                                        "branches": [
                                                            # Check if existing name is a root span
                                                            {
                                                                "case": {
                                                                    "$not": {
                                                                        "$or": [
                                                                            {
                                                                                "$regexMatch": {
                                                                                    "input": {"$ifNull": ["$name", ""]},
                                                                                    "regex": "\\.run",
                                                                                }
                                                                            },
                                                                            {
                                                                                "$regexMatch": {
                                                                                    "input": {"$ifNull": ["$name", ""]},
                                                                                    "regex": "\\.arun",
                                                                                }
                                                                            },
                                                                        ]
                                                                    }
                                                                },
                                                                "then": 0,
                                                            },
                                                            # Workflow root (level 3)
                                                            {
                                                                "case": {"$ne": ["$workflow_id", None]},
                                                                "then": 3,
                                                            },
                                                            # Team root (level 2)
                                                            {
                                                                "case": {"$ne": ["$team_id", None]},
                                                                "then": 2,
                                                            },
                                                            # Agent root (level 1)
                                                            {
                                                                "case": {"$ne": ["$agent_id", None]},
                                                                "then": 1,
                                                            },
                                                        ],
                                                        "default": 0,
                                                    }
                                                },
                                            ]
                                        },
                                        "then": trace.name,
                                        "else": "$name",
                                    }
                                },
                            }
                        },
                    }
                },
            ]

            # Perform atomic upsert using aggregation pipeline
            await collection.update_one(
                {"trace_id": trace.trace_id},
                pipeline,
                upsert=True,
            )

        except Exception as e:
            log_error(f"Error creating trace: {e}")
            # Don't raise - tracing should not break the main application flow
|
|
2406
|
+
    async def get_trace(
        self,
        trace_id: Optional[str] = None,
        run_id: Optional[str] = None,
    ):
        """Get a single trace by trace_id or other filters.

        Args:
            trace_id: The unique trace identifier.
            run_id: Filter by run ID (returns first match).

        Returns:
            Optional[Trace]: The trace if found, None otherwise.

        Note:
            If multiple filters are provided, trace_id takes precedence.
            For other filters, the most recent trace is returned.
        """
        try:
            from agno.tracing.schemas import Trace as TraceSchema

            collection = await self._get_collection(table_type="traces")
            if collection is None:
                return None

            # Get spans collection for aggregation (span stats are derived at
            # read time; they are not stored on the trace document).
            spans_collection = await self._get_collection(table_type="spans")

            query: Dict[str, Any] = {}
            if trace_id:
                query["trace_id"] = trace_id
            elif run_id:
                query["run_id"] = run_id
            else:
                log_debug("get_trace called without any filter parameters")
                return None

            # Find trace with sorting by most recent
            result = await collection.find_one(query, sort=[("start_time", -1)])

            if result:
                # Calculate total_spans and error_count from spans collection
                total_spans = 0
                error_count = 0
                if spans_collection is not None:
                    total_spans = await spans_collection.count_documents({"trace_id": result["trace_id"]})
                    error_count = await spans_collection.count_documents(
                        {"trace_id": result["trace_id"], "status_code": "ERROR"}
                    )

                result["total_spans"] = total_spans
                result["error_count"] = error_count
                # Remove MongoDB's _id field
                result.pop("_id", None)
                return TraceSchema.from_dict(result)
            return None

        except Exception as e:
            log_error(f"Error getting trace: {e}")
            return None
|
2466
|
+
|
|
2467
|
+
async def get_traces(
|
|
2468
|
+
self,
|
|
2469
|
+
run_id: Optional[str] = None,
|
|
2470
|
+
session_id: Optional[str] = None,
|
|
2471
|
+
user_id: Optional[str] = None,
|
|
2472
|
+
agent_id: Optional[str] = None,
|
|
2473
|
+
team_id: Optional[str] = None,
|
|
2474
|
+
workflow_id: Optional[str] = None,
|
|
2475
|
+
status: Optional[str] = None,
|
|
2476
|
+
start_time: Optional[datetime] = None,
|
|
2477
|
+
end_time: Optional[datetime] = None,
|
|
2478
|
+
limit: Optional[int] = 20,
|
|
2479
|
+
page: Optional[int] = 1,
|
|
2480
|
+
) -> tuple[List, int]:
|
|
2481
|
+
"""Get traces matching the provided filters with pagination.
|
|
2482
|
+
|
|
2483
|
+
Args:
|
|
2484
|
+
run_id: Filter by run ID.
|
|
2485
|
+
session_id: Filter by session ID.
|
|
2486
|
+
user_id: Filter by user ID.
|
|
2487
|
+
agent_id: Filter by agent ID.
|
|
2488
|
+
team_id: Filter by team ID.
|
|
2489
|
+
workflow_id: Filter by workflow ID.
|
|
2490
|
+
status: Filter by status (OK, ERROR, UNSET).
|
|
2491
|
+
start_time: Filter traces starting after this datetime.
|
|
2492
|
+
end_time: Filter traces ending before this datetime.
|
|
2493
|
+
limit: Maximum number of traces to return per page.
|
|
2494
|
+
page: Page number (1-indexed).
|
|
2495
|
+
|
|
2496
|
+
Returns:
|
|
2497
|
+
tuple[List[Trace], int]: Tuple of (list of matching traces, total count).
|
|
2498
|
+
"""
|
|
2499
|
+
try:
|
|
2500
|
+
from agno.tracing.schemas import Trace as TraceSchema
|
|
2501
|
+
|
|
2502
|
+
collection = await self._get_collection(table_type="traces")
|
|
2503
|
+
if collection is None:
|
|
2504
|
+
log_debug("Traces collection not found")
|
|
2505
|
+
return [], 0
|
|
2506
|
+
|
|
2507
|
+
# Get spans collection for aggregation
|
|
2508
|
+
spans_collection = await self._get_collection(table_type="spans")
|
|
2509
|
+
|
|
2510
|
+
# Build query
|
|
2511
|
+
query: Dict[str, Any] = {}
|
|
2512
|
+
if run_id:
|
|
2513
|
+
query["run_id"] = run_id
|
|
2514
|
+
if session_id:
|
|
2515
|
+
query["session_id"] = session_id
|
|
2516
|
+
if user_id:
|
|
2517
|
+
query["user_id"] = user_id
|
|
2518
|
+
if agent_id:
|
|
2519
|
+
query["agent_id"] = agent_id
|
|
2520
|
+
if team_id:
|
|
2521
|
+
query["team_id"] = team_id
|
|
2522
|
+
if workflow_id:
|
|
2523
|
+
query["workflow_id"] = workflow_id
|
|
2524
|
+
if status:
|
|
2525
|
+
query["status"] = status
|
|
2526
|
+
if start_time:
|
|
2527
|
+
query["start_time"] = {"$gte": start_time.isoformat()}
|
|
2528
|
+
if end_time:
|
|
2529
|
+
if "end_time" in query:
|
|
2530
|
+
query["end_time"]["$lte"] = end_time.isoformat()
|
|
2531
|
+
else:
|
|
2532
|
+
query["end_time"] = {"$lte": end_time.isoformat()}
|
|
2533
|
+
|
|
2534
|
+
# Get total count
|
|
2535
|
+
total_count = await collection.count_documents(query)
|
|
2536
|
+
|
|
2537
|
+
# Apply pagination
|
|
2538
|
+
skip = ((page or 1) - 1) * (limit or 20)
|
|
2539
|
+
cursor = collection.find(query).sort("start_time", -1).skip(skip).limit(limit or 20)
|
|
2540
|
+
|
|
2541
|
+
results = await cursor.to_list(length=None)
|
|
2542
|
+
|
|
2543
|
+
traces = []
|
|
2544
|
+
for row in results:
|
|
2545
|
+
# Calculate total_spans and error_count from spans collection
|
|
2546
|
+
total_spans = 0
|
|
2547
|
+
error_count = 0
|
|
2548
|
+
if spans_collection is not None:
|
|
2549
|
+
total_spans = await spans_collection.count_documents({"trace_id": row["trace_id"]})
|
|
2550
|
+
error_count = await spans_collection.count_documents(
|
|
2551
|
+
{"trace_id": row["trace_id"], "status_code": "ERROR"}
|
|
2552
|
+
)
|
|
2553
|
+
|
|
2554
|
+
row["total_spans"] = total_spans
|
|
2555
|
+
row["error_count"] = error_count
|
|
2556
|
+
# Remove MongoDB's _id field
|
|
2557
|
+
row.pop("_id", None)
|
|
2558
|
+
traces.append(TraceSchema.from_dict(row))
|
|
2559
|
+
|
|
2560
|
+
return traces, total_count
|
|
2561
|
+
|
|
2562
|
+
except Exception as e:
|
|
2563
|
+
log_error(f"Error getting traces: {e}")
|
|
2564
|
+
return [], 0
|
|
2565
|
+
|
|
2566
|
+
    async def get_trace_stats(
        self,
        user_id: Optional[str] = None,
        agent_id: Optional[str] = None,
        team_id: Optional[str] = None,
        workflow_id: Optional[str] = None,
        start_time: Optional[datetime] = None,
        end_time: Optional[datetime] = None,
        limit: Optional[int] = 20,
        page: Optional[int] = 1,
    ) -> tuple[List[Dict[str, Any]], int]:
        """Get trace statistics grouped by session.

        Args:
            user_id: Filter by user ID.
            agent_id: Filter by agent ID.
            team_id: Filter by team ID.
            workflow_id: Filter by workflow ID.
            start_time: Filter sessions with traces created after this datetime.
            end_time: Filter sessions with traces created before this datetime.
            limit: Maximum number of sessions to return per page.
            page: Page number (1-indexed).

        Returns:
            tuple[List[Dict], int]: Tuple of (list of session stats dicts, total count).
            Each dict contains: session_id, user_id, agent_id, team_id, total_traces,
            workflow_id, first_trace_at, last_trace_at.
        """
        try:
            collection = await self._get_collection(table_type="traces")
            if collection is None:
                log_debug("Traces collection not found")
                return [], 0

            # Build match stage; traces without a session are excluded.
            match_stage: Dict[str, Any] = {"session_id": {"$ne": None}}
            if user_id:
                match_stage["user_id"] = user_id
            if agent_id:
                match_stage["agent_id"] = agent_id
            if team_id:
                match_stage["team_id"] = team_id
            if workflow_id:
                match_stage["workflow_id"] = workflow_id
            if start_time:
                match_stage["created_at"] = {"$gte": start_time.isoformat()}
            if end_time:
                # start_time may already have populated "created_at"; merge the
                # upper bound into the same range filter instead of replacing it.
                if "created_at" in match_stage:
                    match_stage["created_at"]["$lte"] = end_time.isoformat()
                else:
                    match_stage["created_at"] = {"$lte": end_time.isoformat()}

            # Build aggregation pipeline: one output document per session.
            pipeline: List[Dict[str, Any]] = [
                {"$match": match_stage},
                {
                    "$group": {
                        "_id": "$session_id",
                        "user_id": {"$first": "$user_id"},
                        "agent_id": {"$first": "$agent_id"},
                        "team_id": {"$first": "$team_id"},
                        "workflow_id": {"$first": "$workflow_id"},
                        "total_traces": {"$sum": 1},
                        "first_trace_at": {"$min": "$created_at"},
                        "last_trace_at": {"$max": "$created_at"},
                    }
                },
                {"$sort": {"last_trace_at": -1}},
            ]

            # Get total count (must run before $skip/$limit are appended)
            count_pipeline = pipeline + [{"$count": "total"}]
            count_result = await collection.aggregate(count_pipeline).to_list(length=1)
            total_count = count_result[0]["total"] if count_result else 0

            # Apply pagination
            skip = ((page or 1) - 1) * (limit or 20)
            pipeline.append({"$skip": skip})
            pipeline.append({"$limit": limit or 20})

            results = await collection.aggregate(pipeline).to_list(length=None)

            # Convert to list of dicts with datetime objects
            stats_list = []
            for row in results:
                # Convert ISO strings to datetime objects
                first_trace_at_str = row["first_trace_at"]
                last_trace_at_str = row["last_trace_at"]

                # Parse ISO format strings to datetime objects.
                # NOTE(review): assumes created_at is stored as an ISO-8601
                # string (possibly Z-suffixed) — confirm against upsert_trace.
                first_trace_at = datetime.fromisoformat(first_trace_at_str.replace("Z", "+00:00"))
                last_trace_at = datetime.fromisoformat(last_trace_at_str.replace("Z", "+00:00"))

                stats_list.append(
                    {
                        "session_id": row["_id"],
                        "user_id": row["user_id"],
                        "agent_id": row["agent_id"],
                        "team_id": row["team_id"],
                        "workflow_id": row["workflow_id"],
                        "total_traces": row["total_traces"],
                        "first_trace_at": first_trace_at,
                        "last_trace_at": last_trace_at,
                    }
                )

            return stats_list, total_count

        except Exception as e:
            log_error(f"Error getting trace stats: {e}")
            return [], 0
|
2677
|
+
|
|
2678
|
+
# --- Spans ---
|
|
2679
|
+
async def create_span(self, span: "Span") -> None:
|
|
2680
|
+
"""Create a single span in the database.
|
|
2681
|
+
|
|
2682
|
+
Args:
|
|
2683
|
+
span: The Span object to store.
|
|
2684
|
+
"""
|
|
2685
|
+
try:
|
|
2686
|
+
collection = await self._get_collection(table_type="spans", create_collection_if_not_found=True)
|
|
2687
|
+
if collection is None:
|
|
2688
|
+
return
|
|
2689
|
+
|
|
2690
|
+
await collection.insert_one(span.to_dict())
|
|
2691
|
+
|
|
2692
|
+
except Exception as e:
|
|
2693
|
+
log_error(f"Error creating span: {e}")
|
|
2694
|
+
|
|
2695
|
+
async def create_spans(self, spans: List) -> None:
|
|
2696
|
+
"""Create multiple spans in the database as a batch.
|
|
2697
|
+
|
|
2698
|
+
Args:
|
|
2699
|
+
spans: List of Span objects to store.
|
|
2700
|
+
"""
|
|
2701
|
+
if not spans:
|
|
2702
|
+
return
|
|
2703
|
+
|
|
2704
|
+
try:
|
|
2705
|
+
collection = await self._get_collection(table_type="spans", create_collection_if_not_found=True)
|
|
2706
|
+
if collection is None:
|
|
2707
|
+
return
|
|
2708
|
+
|
|
2709
|
+
span_dicts = [span.to_dict() for span in spans]
|
|
2710
|
+
await collection.insert_many(span_dicts)
|
|
2711
|
+
|
|
2712
|
+
except Exception as e:
|
|
2713
|
+
log_error(f"Error creating spans batch: {e}")
|
|
2714
|
+
|
|
2715
|
+
async def get_span(self, span_id: str):
|
|
2716
|
+
"""Get a single span by its span_id.
|
|
2717
|
+
|
|
2718
|
+
Args:
|
|
2719
|
+
span_id: The unique span identifier.
|
|
2720
|
+
|
|
2721
|
+
Returns:
|
|
2722
|
+
Optional[Span]: The span if found, None otherwise.
|
|
2723
|
+
"""
|
|
2724
|
+
try:
|
|
2725
|
+
from agno.tracing.schemas import Span as SpanSchema
|
|
2726
|
+
|
|
2727
|
+
collection = await self._get_collection(table_type="spans")
|
|
2728
|
+
if collection is None:
|
|
2729
|
+
return None
|
|
2730
|
+
|
|
2731
|
+
result = await collection.find_one({"span_id": span_id})
|
|
2732
|
+
if result:
|
|
2733
|
+
# Remove MongoDB's _id field
|
|
2734
|
+
result.pop("_id", None)
|
|
2735
|
+
return SpanSchema.from_dict(result)
|
|
2736
|
+
return None
|
|
2737
|
+
|
|
2738
|
+
except Exception as e:
|
|
2739
|
+
log_error(f"Error getting span: {e}")
|
|
2740
|
+
return None
|
|
2741
|
+
|
|
2742
|
+
async def get_spans(
|
|
2743
|
+
self,
|
|
2744
|
+
trace_id: Optional[str] = None,
|
|
2745
|
+
parent_span_id: Optional[str] = None,
|
|
2746
|
+
limit: Optional[int] = 1000,
|
|
2747
|
+
) -> List:
|
|
2748
|
+
"""Get spans matching the provided filters.
|
|
2749
|
+
|
|
2750
|
+
Args:
|
|
2751
|
+
trace_id: Filter by trace ID.
|
|
2752
|
+
parent_span_id: Filter by parent span ID.
|
|
2753
|
+
limit: Maximum number of spans to return.
|
|
2754
|
+
|
|
2755
|
+
Returns:
|
|
2756
|
+
List[Span]: List of matching spans.
|
|
2757
|
+
"""
|
|
2758
|
+
try:
|
|
2759
|
+
from agno.tracing.schemas import Span as SpanSchema
|
|
2760
|
+
|
|
2761
|
+
collection = await self._get_collection(table_type="spans")
|
|
2762
|
+
if collection is None:
|
|
2763
|
+
return []
|
|
2764
|
+
|
|
2765
|
+
# Build query
|
|
2766
|
+
query: Dict[str, Any] = {}
|
|
2767
|
+
if trace_id:
|
|
2768
|
+
query["trace_id"] = trace_id
|
|
2769
|
+
if parent_span_id:
|
|
2770
|
+
query["parent_span_id"] = parent_span_id
|
|
2771
|
+
|
|
2772
|
+
cursor = collection.find(query).limit(limit or 1000)
|
|
2773
|
+
results = await cursor.to_list(length=None)
|
|
2774
|
+
|
|
2775
|
+
spans = []
|
|
2776
|
+
for row in results:
|
|
2777
|
+
# Remove MongoDB's _id field
|
|
2778
|
+
row.pop("_id", None)
|
|
2779
|
+
spans.append(SpanSchema.from_dict(row))
|
|
2780
|
+
|
|
2781
|
+
return spans
|
|
2782
|
+
|
|
2783
|
+
except Exception as e:
|
|
2784
|
+
log_error(f"Error getting spans: {e}")
|
|
2785
|
+
return []
|
|
2786
|
+
|
|
2787
|
+
# -- Learning methods --
|
|
2788
|
+
async def get_learning(
|
|
2789
|
+
self,
|
|
2790
|
+
learning_type: str,
|
|
2791
|
+
user_id: Optional[str] = None,
|
|
2792
|
+
agent_id: Optional[str] = None,
|
|
2793
|
+
team_id: Optional[str] = None,
|
|
2794
|
+
session_id: Optional[str] = None,
|
|
2795
|
+
namespace: Optional[str] = None,
|
|
2796
|
+
entity_id: Optional[str] = None,
|
|
2797
|
+
entity_type: Optional[str] = None,
|
|
2798
|
+
) -> Optional[Dict[str, Any]]:
|
|
2799
|
+
"""Retrieve a learning record.
|
|
2800
|
+
|
|
2801
|
+
Args:
|
|
2802
|
+
learning_type: Type of learning ('user_profile', 'session_context', etc.)
|
|
2803
|
+
user_id: Filter by user ID.
|
|
2804
|
+
agent_id: Filter by agent ID.
|
|
2805
|
+
team_id: Filter by team ID.
|
|
2806
|
+
session_id: Filter by session ID.
|
|
2807
|
+
namespace: Filter by namespace ('user', 'global', or custom).
|
|
2808
|
+
entity_id: Filter by entity ID (for entity-specific learnings).
|
|
2809
|
+
entity_type: Filter by entity type ('person', 'company', etc.).
|
|
2810
|
+
|
|
2811
|
+
Returns:
|
|
2812
|
+
Dict with 'content' key containing the learning data, or None.
|
|
2813
|
+
"""
|
|
2814
|
+
try:
|
|
2815
|
+
collection = await self._get_collection(table_type="learnings", create_collection_if_not_found=False)
|
|
2816
|
+
if collection is None:
|
|
2817
|
+
return None
|
|
2818
|
+
|
|
2819
|
+
# Build query
|
|
2820
|
+
query: Dict[str, Any] = {"learning_type": learning_type}
|
|
2821
|
+
if user_id is not None:
|
|
2822
|
+
query["user_id"] = user_id
|
|
2823
|
+
if agent_id is not None:
|
|
2824
|
+
query["agent_id"] = agent_id
|
|
2825
|
+
if team_id is not None:
|
|
2826
|
+
query["team_id"] = team_id
|
|
2827
|
+
if session_id is not None:
|
|
2828
|
+
query["session_id"] = session_id
|
|
2829
|
+
if namespace is not None:
|
|
2830
|
+
query["namespace"] = namespace
|
|
2831
|
+
if entity_id is not None:
|
|
2832
|
+
query["entity_id"] = entity_id
|
|
2833
|
+
if entity_type is not None:
|
|
2834
|
+
query["entity_type"] = entity_type
|
|
2835
|
+
|
|
2836
|
+
result = await collection.find_one(query)
|
|
2837
|
+
if result is None:
|
|
2838
|
+
return None
|
|
2839
|
+
|
|
2840
|
+
# Remove MongoDB's _id field
|
|
2841
|
+
result.pop("_id", None)
|
|
2842
|
+
return {"content": result.get("content")}
|
|
2843
|
+
|
|
2844
|
+
except Exception as e:
|
|
2845
|
+
log_debug(f"Error retrieving learning: {e}")
|
|
2846
|
+
return None
|
|
2847
|
+
|
|
2848
|
+
async def upsert_learning(
|
|
2849
|
+
self,
|
|
2850
|
+
id: str,
|
|
2851
|
+
learning_type: str,
|
|
2852
|
+
content: Dict[str, Any],
|
|
2853
|
+
user_id: Optional[str] = None,
|
|
2854
|
+
agent_id: Optional[str] = None,
|
|
2855
|
+
team_id: Optional[str] = None,
|
|
2856
|
+
session_id: Optional[str] = None,
|
|
2857
|
+
namespace: Optional[str] = None,
|
|
2858
|
+
entity_id: Optional[str] = None,
|
|
2859
|
+
entity_type: Optional[str] = None,
|
|
2860
|
+
metadata: Optional[Dict[str, Any]] = None,
|
|
2861
|
+
) -> None:
|
|
2862
|
+
"""Insert or update a learning record.
|
|
2863
|
+
|
|
2864
|
+
Args:
|
|
2865
|
+
id: Unique identifier for the learning.
|
|
2866
|
+
learning_type: Type of learning ('user_profile', 'session_context', etc.)
|
|
2867
|
+
content: The learning content as a dict.
|
|
2868
|
+
user_id: Associated user ID.
|
|
2869
|
+
agent_id: Associated agent ID.
|
|
2870
|
+
team_id: Associated team ID.
|
|
2871
|
+
session_id: Associated session ID.
|
|
2872
|
+
namespace: Namespace for scoping ('user', 'global', or custom).
|
|
2873
|
+
entity_id: Associated entity ID (for entity-specific learnings).
|
|
2874
|
+
entity_type: Entity type ('person', 'company', etc.).
|
|
2875
|
+
metadata: Optional metadata.
|
|
2876
|
+
"""
|
|
2877
|
+
try:
|
|
2878
|
+
collection = await self._get_collection(table_type="learnings", create_collection_if_not_found=True)
|
|
2879
|
+
if collection is None:
|
|
2880
|
+
return
|
|
2881
|
+
|
|
2882
|
+
current_time = int(time.time())
|
|
2883
|
+
|
|
2884
|
+
document = {
|
|
2885
|
+
"learning_id": id,
|
|
2886
|
+
"learning_type": learning_type,
|
|
2887
|
+
"namespace": namespace,
|
|
2888
|
+
"user_id": user_id,
|
|
2889
|
+
"agent_id": agent_id,
|
|
2890
|
+
"team_id": team_id,
|
|
2891
|
+
"session_id": session_id,
|
|
2892
|
+
"entity_id": entity_id,
|
|
2893
|
+
"entity_type": entity_type,
|
|
2894
|
+
"content": content,
|
|
2895
|
+
"metadata": metadata,
|
|
2896
|
+
"updated_at": current_time,
|
|
2897
|
+
}
|
|
2898
|
+
|
|
2899
|
+
# Use upsert to insert or update
|
|
2900
|
+
await collection.update_one(
|
|
2901
|
+
{"learning_id": id},
|
|
2902
|
+
{"$set": document, "$setOnInsert": {"created_at": current_time}},
|
|
2903
|
+
upsert=True,
|
|
2904
|
+
)
|
|
2905
|
+
|
|
2906
|
+
log_debug(f"Upserted learning: {id}")
|
|
2907
|
+
|
|
2908
|
+
except Exception as e:
|
|
2909
|
+
log_debug(f"Error upserting learning: {e}")
|
|
2910
|
+
|
|
2911
|
+
async def delete_learning(self, id: str) -> bool:
|
|
2912
|
+
"""Delete a learning record.
|
|
2913
|
+
|
|
2914
|
+
Args:
|
|
2915
|
+
id: The learning ID to delete.
|
|
2916
|
+
|
|
2917
|
+
Returns:
|
|
2918
|
+
True if deleted, False otherwise.
|
|
2919
|
+
"""
|
|
2920
|
+
try:
|
|
2921
|
+
collection = await self._get_collection(table_type="learnings", create_collection_if_not_found=False)
|
|
2922
|
+
if collection is None:
|
|
2923
|
+
return False
|
|
2924
|
+
|
|
2925
|
+
result = await collection.delete_one({"learning_id": id})
|
|
2926
|
+
return result.deleted_count > 0
|
|
2927
|
+
|
|
2928
|
+
except Exception as e:
|
|
2929
|
+
log_debug(f"Error deleting learning: {e}")
|
|
2930
|
+
return False
|
|
2931
|
+
|
|
2932
|
+
async def get_learnings(
    self,
    learning_type: Optional[str] = None,
    user_id: Optional[str] = None,
    agent_id: Optional[str] = None,
    team_id: Optional[str] = None,
    session_id: Optional[str] = None,
    namespace: Optional[str] = None,
    entity_id: Optional[str] = None,
    entity_type: Optional[str] = None,
    limit: Optional[int] = None,
) -> List[Dict[str, Any]]:
    """Get multiple learning records.

    Args:
        learning_type: Filter by learning type.
        user_id: Filter by user ID.
        agent_id: Filter by agent ID.
        team_id: Filter by team ID.
        session_id: Filter by session ID.
        namespace: Filter by namespace ('user', 'global', or custom).
        entity_id: Filter by entity ID (for entity-specific learnings).
        entity_type: Filter by entity type ('person', 'company', etc.).
        limit: Maximum number of records to return.

    Returns:
        List of learning records. Empty list when the collection does not
        exist or an error occurs.
    """
    try:
        collection = await self._get_collection(table_type="learnings", create_collection_if_not_found=False)
        if collection is None:
            return []

        # Build the Mongo filter from the non-None arguments only, so an
        # omitted argument does not constrain the query.
        filters = {
            "learning_type": learning_type,
            "user_id": user_id,
            "agent_id": agent_id,
            "team_id": team_id,
            "session_id": session_id,
            "namespace": namespace,
            "entity_id": entity_id,
            "entity_type": entity_type,
        }
        query: Dict[str, Any] = {field: value for field, value in filters.items() if value is not None}

        cursor = collection.find(query)
        if limit is not None:
            cursor = cursor.limit(limit)

        results = await cursor.to_list(length=None)

        # Strip MongoDB's internal _id field before returning the rows.
        for row in results:
            row.pop("_id", None)

        return results

    except Exception as e:
        log_debug(f"Error getting learnings: {e}")
        return []
|