agno 2.2.13__py3-none-any.whl → 2.4.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/__init__.py +6 -0
- agno/agent/agent.py +5252 -3145
- agno/agent/remote.py +525 -0
- agno/api/api.py +2 -0
- agno/client/__init__.py +3 -0
- agno/client/a2a/__init__.py +10 -0
- agno/client/a2a/client.py +554 -0
- agno/client/a2a/schemas.py +112 -0
- agno/client/a2a/utils.py +369 -0
- agno/client/os.py +2669 -0
- agno/compression/__init__.py +3 -0
- agno/compression/manager.py +247 -0
- agno/culture/manager.py +2 -2
- agno/db/base.py +927 -6
- agno/db/dynamo/dynamo.py +788 -2
- agno/db/dynamo/schemas.py +128 -0
- agno/db/dynamo/utils.py +26 -3
- agno/db/firestore/firestore.py +674 -50
- agno/db/firestore/schemas.py +41 -0
- agno/db/firestore/utils.py +25 -10
- agno/db/gcs_json/gcs_json_db.py +506 -3
- agno/db/gcs_json/utils.py +14 -2
- agno/db/in_memory/in_memory_db.py +203 -4
- agno/db/in_memory/utils.py +14 -2
- agno/db/json/json_db.py +498 -2
- agno/db/json/utils.py +14 -2
- agno/db/migrations/manager.py +199 -0
- agno/db/migrations/utils.py +19 -0
- agno/db/migrations/v1_to_v2.py +54 -16
- agno/db/migrations/versions/__init__.py +0 -0
- agno/db/migrations/versions/v2_3_0.py +977 -0
- agno/db/mongo/async_mongo.py +1013 -39
- agno/db/mongo/mongo.py +684 -4
- agno/db/mongo/schemas.py +48 -0
- agno/db/mongo/utils.py +17 -0
- agno/db/mysql/__init__.py +2 -1
- agno/db/mysql/async_mysql.py +2958 -0
- agno/db/mysql/mysql.py +722 -53
- agno/db/mysql/schemas.py +77 -11
- agno/db/mysql/utils.py +151 -8
- agno/db/postgres/async_postgres.py +1254 -137
- agno/db/postgres/postgres.py +2316 -93
- agno/db/postgres/schemas.py +153 -21
- agno/db/postgres/utils.py +22 -7
- agno/db/redis/redis.py +531 -3
- agno/db/redis/schemas.py +36 -0
- agno/db/redis/utils.py +31 -15
- agno/db/schemas/evals.py +1 -0
- agno/db/schemas/memory.py +20 -9
- agno/db/singlestore/schemas.py +70 -1
- agno/db/singlestore/singlestore.py +737 -74
- agno/db/singlestore/utils.py +13 -3
- agno/db/sqlite/async_sqlite.py +1069 -89
- agno/db/sqlite/schemas.py +133 -1
- agno/db/sqlite/sqlite.py +2203 -165
- agno/db/sqlite/utils.py +21 -11
- agno/db/surrealdb/models.py +25 -0
- agno/db/surrealdb/surrealdb.py +603 -1
- agno/db/utils.py +60 -0
- agno/eval/__init__.py +26 -3
- agno/eval/accuracy.py +25 -12
- agno/eval/agent_as_judge.py +871 -0
- agno/eval/base.py +29 -0
- agno/eval/performance.py +10 -4
- agno/eval/reliability.py +22 -13
- agno/eval/utils.py +2 -1
- agno/exceptions.py +42 -0
- agno/hooks/__init__.py +3 -0
- agno/hooks/decorator.py +164 -0
- agno/integrations/discord/client.py +13 -2
- agno/knowledge/__init__.py +4 -0
- agno/knowledge/chunking/code.py +90 -0
- agno/knowledge/chunking/document.py +65 -4
- agno/knowledge/chunking/fixed.py +4 -1
- agno/knowledge/chunking/markdown.py +102 -11
- agno/knowledge/chunking/recursive.py +2 -2
- agno/knowledge/chunking/semantic.py +130 -48
- agno/knowledge/chunking/strategy.py +18 -0
- agno/knowledge/embedder/azure_openai.py +0 -1
- agno/knowledge/embedder/google.py +1 -1
- agno/knowledge/embedder/mistral.py +1 -1
- agno/knowledge/embedder/nebius.py +1 -1
- agno/knowledge/embedder/openai.py +16 -12
- agno/knowledge/filesystem.py +412 -0
- agno/knowledge/knowledge.py +4261 -1199
- agno/knowledge/protocol.py +134 -0
- agno/knowledge/reader/arxiv_reader.py +3 -2
- agno/knowledge/reader/base.py +9 -7
- agno/knowledge/reader/csv_reader.py +91 -42
- agno/knowledge/reader/docx_reader.py +9 -10
- agno/knowledge/reader/excel_reader.py +225 -0
- agno/knowledge/reader/field_labeled_csv_reader.py +38 -48
- agno/knowledge/reader/firecrawl_reader.py +3 -2
- agno/knowledge/reader/json_reader.py +16 -22
- agno/knowledge/reader/markdown_reader.py +15 -14
- agno/knowledge/reader/pdf_reader.py +33 -28
- agno/knowledge/reader/pptx_reader.py +9 -10
- agno/knowledge/reader/reader_factory.py +135 -1
- agno/knowledge/reader/s3_reader.py +8 -16
- agno/knowledge/reader/tavily_reader.py +3 -3
- agno/knowledge/reader/text_reader.py +15 -14
- agno/knowledge/reader/utils/__init__.py +17 -0
- agno/knowledge/reader/utils/spreadsheet.py +114 -0
- agno/knowledge/reader/web_search_reader.py +8 -65
- agno/knowledge/reader/website_reader.py +16 -13
- agno/knowledge/reader/wikipedia_reader.py +36 -3
- agno/knowledge/reader/youtube_reader.py +3 -2
- agno/knowledge/remote_content/__init__.py +33 -0
- agno/knowledge/remote_content/config.py +266 -0
- agno/knowledge/remote_content/remote_content.py +105 -17
- agno/knowledge/utils.py +76 -22
- agno/learn/__init__.py +71 -0
- agno/learn/config.py +463 -0
- agno/learn/curate.py +185 -0
- agno/learn/machine.py +725 -0
- agno/learn/schemas.py +1114 -0
- agno/learn/stores/__init__.py +38 -0
- agno/learn/stores/decision_log.py +1156 -0
- agno/learn/stores/entity_memory.py +3275 -0
- agno/learn/stores/learned_knowledge.py +1583 -0
- agno/learn/stores/protocol.py +117 -0
- agno/learn/stores/session_context.py +1217 -0
- agno/learn/stores/user_memory.py +1495 -0
- agno/learn/stores/user_profile.py +1220 -0
- agno/learn/utils.py +209 -0
- agno/media.py +22 -6
- agno/memory/__init__.py +14 -1
- agno/memory/manager.py +223 -8
- agno/memory/strategies/__init__.py +15 -0
- agno/memory/strategies/base.py +66 -0
- agno/memory/strategies/summarize.py +196 -0
- agno/memory/strategies/types.py +37 -0
- agno/models/aimlapi/aimlapi.py +17 -0
- agno/models/anthropic/claude.py +434 -59
- agno/models/aws/bedrock.py +121 -20
- agno/models/aws/claude.py +131 -274
- agno/models/azure/ai_foundry.py +10 -6
- agno/models/azure/openai_chat.py +33 -10
- agno/models/base.py +1162 -561
- agno/models/cerebras/cerebras.py +120 -24
- agno/models/cerebras/cerebras_openai.py +21 -2
- agno/models/cohere/chat.py +65 -6
- agno/models/cometapi/cometapi.py +18 -1
- agno/models/dashscope/dashscope.py +2 -3
- agno/models/deepinfra/deepinfra.py +18 -1
- agno/models/deepseek/deepseek.py +69 -3
- agno/models/fireworks/fireworks.py +18 -1
- agno/models/google/gemini.py +959 -89
- agno/models/google/utils.py +22 -0
- agno/models/groq/groq.py +48 -18
- agno/models/huggingface/huggingface.py +17 -6
- agno/models/ibm/watsonx.py +16 -6
- agno/models/internlm/internlm.py +18 -1
- agno/models/langdb/langdb.py +13 -1
- agno/models/litellm/chat.py +88 -9
- agno/models/litellm/litellm_openai.py +18 -1
- agno/models/message.py +24 -5
- agno/models/meta/llama.py +40 -13
- agno/models/meta/llama_openai.py +22 -21
- agno/models/metrics.py +12 -0
- agno/models/mistral/mistral.py +8 -4
- agno/models/n1n/__init__.py +3 -0
- agno/models/n1n/n1n.py +57 -0
- agno/models/nebius/nebius.py +6 -7
- agno/models/nvidia/nvidia.py +20 -3
- agno/models/ollama/__init__.py +2 -0
- agno/models/ollama/chat.py +17 -6
- agno/models/ollama/responses.py +100 -0
- agno/models/openai/__init__.py +2 -0
- agno/models/openai/chat.py +117 -26
- agno/models/openai/open_responses.py +46 -0
- agno/models/openai/responses.py +110 -32
- agno/models/openrouter/__init__.py +2 -0
- agno/models/openrouter/openrouter.py +67 -2
- agno/models/openrouter/responses.py +146 -0
- agno/models/perplexity/perplexity.py +19 -1
- agno/models/portkey/portkey.py +7 -6
- agno/models/requesty/requesty.py +19 -2
- agno/models/response.py +20 -2
- agno/models/sambanova/sambanova.py +20 -3
- agno/models/siliconflow/siliconflow.py +19 -2
- agno/models/together/together.py +20 -3
- agno/models/vercel/v0.py +20 -3
- agno/models/vertexai/claude.py +124 -4
- agno/models/vllm/vllm.py +19 -14
- agno/models/xai/xai.py +19 -2
- agno/os/app.py +467 -137
- agno/os/auth.py +253 -5
- agno/os/config.py +22 -0
- agno/os/interfaces/a2a/a2a.py +7 -6
- agno/os/interfaces/a2a/router.py +635 -26
- agno/os/interfaces/a2a/utils.py +32 -33
- agno/os/interfaces/agui/agui.py +5 -3
- agno/os/interfaces/agui/router.py +26 -16
- agno/os/interfaces/agui/utils.py +97 -57
- agno/os/interfaces/base.py +7 -7
- agno/os/interfaces/slack/router.py +16 -7
- agno/os/interfaces/slack/slack.py +7 -7
- agno/os/interfaces/whatsapp/router.py +35 -7
- agno/os/interfaces/whatsapp/security.py +3 -1
- agno/os/interfaces/whatsapp/whatsapp.py +11 -8
- agno/os/managers.py +326 -0
- agno/os/mcp.py +652 -79
- agno/os/middleware/__init__.py +4 -0
- agno/os/middleware/jwt.py +718 -115
- agno/os/middleware/trailing_slash.py +27 -0
- agno/os/router.py +105 -1558
- agno/os/routers/agents/__init__.py +3 -0
- agno/os/routers/agents/router.py +655 -0
- agno/os/routers/agents/schema.py +288 -0
- agno/os/routers/components/__init__.py +3 -0
- agno/os/routers/components/components.py +475 -0
- agno/os/routers/database.py +155 -0
- agno/os/routers/evals/evals.py +111 -18
- agno/os/routers/evals/schemas.py +38 -5
- agno/os/routers/evals/utils.py +80 -11
- agno/os/routers/health.py +3 -3
- agno/os/routers/knowledge/knowledge.py +284 -35
- agno/os/routers/knowledge/schemas.py +14 -2
- agno/os/routers/memory/memory.py +274 -11
- agno/os/routers/memory/schemas.py +44 -3
- agno/os/routers/metrics/metrics.py +30 -15
- agno/os/routers/metrics/schemas.py +10 -6
- agno/os/routers/registry/__init__.py +3 -0
- agno/os/routers/registry/registry.py +337 -0
- agno/os/routers/session/session.py +143 -14
- agno/os/routers/teams/__init__.py +3 -0
- agno/os/routers/teams/router.py +550 -0
- agno/os/routers/teams/schema.py +280 -0
- agno/os/routers/traces/__init__.py +3 -0
- agno/os/routers/traces/schemas.py +414 -0
- agno/os/routers/traces/traces.py +549 -0
- agno/os/routers/workflows/__init__.py +3 -0
- agno/os/routers/workflows/router.py +757 -0
- agno/os/routers/workflows/schema.py +139 -0
- agno/os/schema.py +157 -584
- agno/os/scopes.py +469 -0
- agno/os/settings.py +3 -0
- agno/os/utils.py +574 -185
- agno/reasoning/anthropic.py +85 -1
- agno/reasoning/azure_ai_foundry.py +93 -1
- agno/reasoning/deepseek.py +102 -2
- agno/reasoning/default.py +6 -7
- agno/reasoning/gemini.py +87 -3
- agno/reasoning/groq.py +109 -2
- agno/reasoning/helpers.py +6 -7
- agno/reasoning/manager.py +1238 -0
- agno/reasoning/ollama.py +93 -1
- agno/reasoning/openai.py +115 -1
- agno/reasoning/vertexai.py +85 -1
- agno/registry/__init__.py +3 -0
- agno/registry/registry.py +68 -0
- agno/remote/__init__.py +3 -0
- agno/remote/base.py +581 -0
- agno/run/__init__.py +2 -4
- agno/run/agent.py +134 -19
- agno/run/base.py +49 -1
- agno/run/cancel.py +65 -52
- agno/run/cancellation_management/__init__.py +9 -0
- agno/run/cancellation_management/base.py +78 -0
- agno/run/cancellation_management/in_memory_cancellation_manager.py +100 -0
- agno/run/cancellation_management/redis_cancellation_manager.py +236 -0
- agno/run/requirement.py +181 -0
- agno/run/team.py +111 -19
- agno/run/workflow.py +2 -1
- agno/session/agent.py +57 -92
- agno/session/summary.py +1 -1
- agno/session/team.py +62 -115
- agno/session/workflow.py +353 -57
- agno/skills/__init__.py +17 -0
- agno/skills/agent_skills.py +377 -0
- agno/skills/errors.py +32 -0
- agno/skills/loaders/__init__.py +4 -0
- agno/skills/loaders/base.py +27 -0
- agno/skills/loaders/local.py +216 -0
- agno/skills/skill.py +65 -0
- agno/skills/utils.py +107 -0
- agno/skills/validator.py +277 -0
- agno/table.py +10 -0
- agno/team/__init__.py +5 -1
- agno/team/remote.py +447 -0
- agno/team/team.py +3769 -2202
- agno/tools/brandfetch.py +27 -18
- agno/tools/browserbase.py +225 -16
- agno/tools/crawl4ai.py +3 -0
- agno/tools/duckduckgo.py +25 -71
- agno/tools/exa.py +0 -21
- agno/tools/file.py +14 -13
- agno/tools/file_generation.py +12 -6
- agno/tools/firecrawl.py +15 -7
- agno/tools/function.py +94 -113
- agno/tools/google_bigquery.py +11 -2
- agno/tools/google_drive.py +4 -3
- agno/tools/knowledge.py +9 -4
- agno/tools/mcp/mcp.py +301 -18
- agno/tools/mcp/multi_mcp.py +269 -14
- agno/tools/mem0.py +11 -10
- agno/tools/memory.py +47 -46
- agno/tools/mlx_transcribe.py +10 -7
- agno/tools/models/nebius.py +5 -5
- agno/tools/models_labs.py +20 -10
- agno/tools/nano_banana.py +151 -0
- agno/tools/parallel.py +0 -7
- agno/tools/postgres.py +76 -36
- agno/tools/python.py +14 -6
- agno/tools/reasoning.py +30 -23
- agno/tools/redshift.py +406 -0
- agno/tools/shopify.py +1519 -0
- agno/tools/spotify.py +919 -0
- agno/tools/tavily.py +4 -1
- agno/tools/toolkit.py +253 -18
- agno/tools/websearch.py +93 -0
- agno/tools/website.py +1 -1
- agno/tools/wikipedia.py +1 -1
- agno/tools/workflow.py +56 -48
- agno/tools/yfinance.py +12 -11
- agno/tracing/__init__.py +12 -0
- agno/tracing/exporter.py +161 -0
- agno/tracing/schemas.py +276 -0
- agno/tracing/setup.py +112 -0
- agno/utils/agent.py +251 -10
- agno/utils/cryptography.py +22 -0
- agno/utils/dttm.py +33 -0
- agno/utils/events.py +264 -7
- agno/utils/hooks.py +111 -3
- agno/utils/http.py +161 -2
- agno/utils/mcp.py +49 -8
- agno/utils/media.py +22 -1
- agno/utils/models/ai_foundry.py +9 -2
- agno/utils/models/claude.py +20 -5
- agno/utils/models/cohere.py +9 -2
- agno/utils/models/llama.py +9 -2
- agno/utils/models/mistral.py +4 -2
- agno/utils/os.py +0 -0
- agno/utils/print_response/agent.py +99 -16
- agno/utils/print_response/team.py +223 -24
- agno/utils/print_response/workflow.py +0 -2
- agno/utils/prompts.py +8 -6
- agno/utils/remote.py +23 -0
- agno/utils/response.py +1 -13
- agno/utils/string.py +91 -2
- agno/utils/team.py +62 -12
- agno/utils/tokens.py +657 -0
- agno/vectordb/base.py +15 -2
- agno/vectordb/cassandra/cassandra.py +1 -1
- agno/vectordb/chroma/__init__.py +2 -1
- agno/vectordb/chroma/chromadb.py +468 -23
- agno/vectordb/clickhouse/clickhousedb.py +1 -1
- agno/vectordb/couchbase/couchbase.py +6 -2
- agno/vectordb/lancedb/lance_db.py +7 -38
- agno/vectordb/lightrag/lightrag.py +7 -6
- agno/vectordb/milvus/milvus.py +118 -84
- agno/vectordb/mongodb/__init__.py +2 -1
- agno/vectordb/mongodb/mongodb.py +14 -31
- agno/vectordb/pgvector/pgvector.py +120 -66
- agno/vectordb/pineconedb/pineconedb.py +2 -19
- agno/vectordb/qdrant/__init__.py +2 -1
- agno/vectordb/qdrant/qdrant.py +33 -56
- agno/vectordb/redis/__init__.py +2 -1
- agno/vectordb/redis/redisdb.py +19 -31
- agno/vectordb/singlestore/singlestore.py +17 -9
- agno/vectordb/surrealdb/surrealdb.py +2 -38
- agno/vectordb/weaviate/__init__.py +2 -1
- agno/vectordb/weaviate/weaviate.py +7 -3
- agno/workflow/__init__.py +5 -1
- agno/workflow/agent.py +2 -2
- agno/workflow/condition.py +12 -10
- agno/workflow/loop.py +28 -9
- agno/workflow/parallel.py +21 -13
- agno/workflow/remote.py +362 -0
- agno/workflow/router.py +12 -9
- agno/workflow/step.py +261 -36
- agno/workflow/steps.py +12 -8
- agno/workflow/types.py +40 -77
- agno/workflow/workflow.py +939 -213
- {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/METADATA +134 -181
- agno-2.4.3.dist-info/RECORD +677 -0
- {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/WHEEL +1 -1
- agno/tools/googlesearch.py +0 -98
- agno/tools/memori.py +0 -339
- agno-2.2.13.dist-info/RECORD +0 -575
- {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/licenses/LICENSE +0 -0
- {agno-2.2.13.dist-info → agno-2.4.3.dist-info}/top_level.txt +0 -0
agno/db/json/json_db.py
CHANGED
|
@@ -3,9 +3,12 @@ import os
|
|
|
3
3
|
import time
|
|
4
4
|
from datetime import date, datetime, timedelta, timezone
|
|
5
5
|
from pathlib import Path
|
|
6
|
-
from typing import Any, Dict, List, Optional, Tuple, Union
|
|
6
|
+
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
|
|
7
7
|
from uuid import uuid4
|
|
8
8
|
|
|
9
|
+
if TYPE_CHECKING:
|
|
10
|
+
from agno.tracing.schemas import Span, Trace
|
|
11
|
+
|
|
9
12
|
from agno.db.base import BaseDb, SessionType
|
|
10
13
|
from agno.db.json.utils import (
|
|
11
14
|
apply_sorting,
|
|
@@ -34,6 +37,8 @@ class JsonDb(BaseDb):
|
|
|
34
37
|
metrics_table: Optional[str] = None,
|
|
35
38
|
eval_table: Optional[str] = None,
|
|
36
39
|
knowledge_table: Optional[str] = None,
|
|
40
|
+
traces_table: Optional[str] = None,
|
|
41
|
+
spans_table: Optional[str] = None,
|
|
37
42
|
id: Optional[str] = None,
|
|
38
43
|
):
|
|
39
44
|
"""
|
|
@@ -47,6 +52,8 @@ class JsonDb(BaseDb):
|
|
|
47
52
|
metrics_table (Optional[str]): Name of the JSON file to store metrics.
|
|
48
53
|
eval_table (Optional[str]): Name of the JSON file to store evaluation runs.
|
|
49
54
|
knowledge_table (Optional[str]): Name of the JSON file to store knowledge content.
|
|
55
|
+
traces_table (Optional[str]): Name of the JSON file to store run traces.
|
|
56
|
+
spans_table (Optional[str]): Name of the JSON file to store span events.
|
|
50
57
|
id (Optional[str]): ID of the database.
|
|
51
58
|
"""
|
|
52
59
|
if id is None:
|
|
@@ -61,6 +68,8 @@ class JsonDb(BaseDb):
|
|
|
61
68
|
metrics_table=metrics_table,
|
|
62
69
|
eval_table=eval_table,
|
|
63
70
|
knowledge_table=knowledge_table,
|
|
71
|
+
traces_table=traces_table,
|
|
72
|
+
spans_table=spans_table,
|
|
64
73
|
)
|
|
65
74
|
|
|
66
75
|
# Create the directory where the JSON files will be stored, if it doesn't exist
|
|
@@ -124,6 +133,14 @@ class JsonDb(BaseDb):
|
|
|
124
133
|
log_error(f"Error writing to the {file_path} JSON file: {e}")
|
|
125
134
|
raise e
|
|
126
135
|
|
|
136
|
+
def get_latest_schema_version(self):
|
|
137
|
+
"""Get the latest version of the database schema."""
|
|
138
|
+
pass
|
|
139
|
+
|
|
140
|
+
def upsert_schema_version(self, version: str) -> None:
|
|
141
|
+
"""Upsert the schema version into the database."""
|
|
142
|
+
pass
|
|
143
|
+
|
|
127
144
|
# -- Session methods --
|
|
128
145
|
|
|
129
146
|
def delete_session(self, session_id: str) -> bool:
|
|
@@ -625,13 +642,14 @@ class JsonDb(BaseDb):
|
|
|
625
642
|
raise e
|
|
626
643
|
|
|
627
644
|
def get_user_memory_stats(
|
|
628
|
-
self, limit: Optional[int] = None, page: Optional[int] = None
|
|
645
|
+
self, limit: Optional[int] = None, page: Optional[int] = None, user_id: Optional[str] = None
|
|
629
646
|
) -> Tuple[List[Dict[str, Any]], int]:
|
|
630
647
|
"""Get user memory statistics.
|
|
631
648
|
|
|
632
649
|
Args:
|
|
633
650
|
limit (Optional[int]): The maximum number of user stats to return.
|
|
634
651
|
page (Optional[int]): The page number.
|
|
652
|
+
user_id (Optional[str]): User ID for filtering.
|
|
635
653
|
|
|
636
654
|
Returns:
|
|
637
655
|
Tuple[List[Dict[str, Any]], int]: A list of dictionaries containing user stats and total count.
|
|
@@ -642,6 +660,9 @@ class JsonDb(BaseDb):
|
|
|
642
660
|
|
|
643
661
|
for memory in memories:
|
|
644
662
|
memory_user_id = memory.get("user_id")
|
|
663
|
+
# filter by user_id if provided
|
|
664
|
+
if user_id is not None and memory_user_id != user_id:
|
|
665
|
+
continue
|
|
645
666
|
if memory_user_id:
|
|
646
667
|
if memory_user_id not in user_stats:
|
|
647
668
|
user_stats[memory_user_id] = {
|
|
@@ -1326,3 +1347,478 @@ class JsonDb(BaseDb):
|
|
|
1326
1347
|
except Exception as e:
|
|
1327
1348
|
log_error(f"Error upserting cultural knowledge: {e}")
|
|
1328
1349
|
raise e
|
|
1350
|
+
|
|
1351
|
+
# --- Traces ---
|
|
1352
|
+
def upsert_trace(self, trace: "Trace") -> None:
|
|
1353
|
+
"""Create or update a single trace record in the database.
|
|
1354
|
+
|
|
1355
|
+
Args:
|
|
1356
|
+
trace: The Trace object to store (one per trace_id).
|
|
1357
|
+
"""
|
|
1358
|
+
try:
|
|
1359
|
+
traces = self._read_json_file(self.trace_table_name, create_table_if_not_found=True)
|
|
1360
|
+
|
|
1361
|
+
# Check if trace exists
|
|
1362
|
+
existing_idx = None
|
|
1363
|
+
for i, existing in enumerate(traces):
|
|
1364
|
+
if existing.get("trace_id") == trace.trace_id:
|
|
1365
|
+
existing_idx = i
|
|
1366
|
+
break
|
|
1367
|
+
|
|
1368
|
+
if existing_idx is not None:
|
|
1369
|
+
existing = traces[existing_idx]
|
|
1370
|
+
|
|
1371
|
+
# workflow (level 3) > team (level 2) > agent (level 1) > child/unknown (level 0)
|
|
1372
|
+
def get_component_level(workflow_id, team_id, agent_id, name):
|
|
1373
|
+
is_root_name = ".run" in name or ".arun" in name
|
|
1374
|
+
if not is_root_name:
|
|
1375
|
+
return 0
|
|
1376
|
+
elif workflow_id:
|
|
1377
|
+
return 3
|
|
1378
|
+
elif team_id:
|
|
1379
|
+
return 2
|
|
1380
|
+
elif agent_id:
|
|
1381
|
+
return 1
|
|
1382
|
+
else:
|
|
1383
|
+
return 0
|
|
1384
|
+
|
|
1385
|
+
existing_level = get_component_level(
|
|
1386
|
+
existing.get("workflow_id"),
|
|
1387
|
+
existing.get("team_id"),
|
|
1388
|
+
existing.get("agent_id"),
|
|
1389
|
+
existing.get("name", ""),
|
|
1390
|
+
)
|
|
1391
|
+
new_level = get_component_level(trace.workflow_id, trace.team_id, trace.agent_id, trace.name)
|
|
1392
|
+
should_update_name = new_level > existing_level
|
|
1393
|
+
|
|
1394
|
+
# Parse existing start_time to calculate correct duration
|
|
1395
|
+
existing_start_time_str = existing.get("start_time")
|
|
1396
|
+
if isinstance(existing_start_time_str, str):
|
|
1397
|
+
existing_start_time = datetime.fromisoformat(existing_start_time_str.replace("Z", "+00:00"))
|
|
1398
|
+
else:
|
|
1399
|
+
existing_start_time = trace.start_time
|
|
1400
|
+
|
|
1401
|
+
recalculated_duration_ms = int((trace.end_time - existing_start_time).total_seconds() * 1000)
|
|
1402
|
+
|
|
1403
|
+
# Update existing trace
|
|
1404
|
+
existing["end_time"] = trace.end_time.isoformat()
|
|
1405
|
+
existing["duration_ms"] = recalculated_duration_ms
|
|
1406
|
+
existing["status"] = trace.status
|
|
1407
|
+
if should_update_name:
|
|
1408
|
+
existing["name"] = trace.name
|
|
1409
|
+
|
|
1410
|
+
# Update context fields only if new value is not None
|
|
1411
|
+
if trace.run_id is not None:
|
|
1412
|
+
existing["run_id"] = trace.run_id
|
|
1413
|
+
if trace.session_id is not None:
|
|
1414
|
+
existing["session_id"] = trace.session_id
|
|
1415
|
+
if trace.user_id is not None:
|
|
1416
|
+
existing["user_id"] = trace.user_id
|
|
1417
|
+
if trace.agent_id is not None:
|
|
1418
|
+
existing["agent_id"] = trace.agent_id
|
|
1419
|
+
if trace.team_id is not None:
|
|
1420
|
+
existing["team_id"] = trace.team_id
|
|
1421
|
+
if trace.workflow_id is not None:
|
|
1422
|
+
existing["workflow_id"] = trace.workflow_id
|
|
1423
|
+
|
|
1424
|
+
traces[existing_idx] = existing
|
|
1425
|
+
else:
|
|
1426
|
+
# Add new trace
|
|
1427
|
+
trace_dict = trace.to_dict()
|
|
1428
|
+
trace_dict.pop("total_spans", None)
|
|
1429
|
+
trace_dict.pop("error_count", None)
|
|
1430
|
+
traces.append(trace_dict)
|
|
1431
|
+
|
|
1432
|
+
self._write_json_file(self.trace_table_name, traces)
|
|
1433
|
+
|
|
1434
|
+
except Exception as e:
|
|
1435
|
+
log_error(f"Error creating trace: {e}")
|
|
1436
|
+
|
|
1437
|
+
def get_trace(
|
|
1438
|
+
self,
|
|
1439
|
+
trace_id: Optional[str] = None,
|
|
1440
|
+
run_id: Optional[str] = None,
|
|
1441
|
+
):
|
|
1442
|
+
"""Get a single trace by trace_id or other filters.
|
|
1443
|
+
|
|
1444
|
+
Args:
|
|
1445
|
+
trace_id: The unique trace identifier.
|
|
1446
|
+
run_id: Filter by run ID (returns first match).
|
|
1447
|
+
|
|
1448
|
+
Returns:
|
|
1449
|
+
Optional[Trace]: The trace if found, None otherwise.
|
|
1450
|
+
"""
|
|
1451
|
+
try:
|
|
1452
|
+
from agno.tracing.schemas import Trace
|
|
1453
|
+
|
|
1454
|
+
traces = self._read_json_file(self.trace_table_name, create_table_if_not_found=False)
|
|
1455
|
+
if not traces:
|
|
1456
|
+
return None
|
|
1457
|
+
|
|
1458
|
+
# Get spans for calculating total_spans and error_count
|
|
1459
|
+
spans = self._read_json_file(self.span_table_name, create_table_if_not_found=False)
|
|
1460
|
+
|
|
1461
|
+
# Filter traces
|
|
1462
|
+
filtered = []
|
|
1463
|
+
for t in traces:
|
|
1464
|
+
if trace_id and t.get("trace_id") == trace_id:
|
|
1465
|
+
filtered.append(t)
|
|
1466
|
+
break
|
|
1467
|
+
elif run_id and t.get("run_id") == run_id:
|
|
1468
|
+
filtered.append(t)
|
|
1469
|
+
|
|
1470
|
+
if not filtered:
|
|
1471
|
+
return None
|
|
1472
|
+
|
|
1473
|
+
# Sort by start_time desc and get first
|
|
1474
|
+
filtered.sort(key=lambda x: x.get("start_time", ""), reverse=True)
|
|
1475
|
+
trace_data = filtered[0]
|
|
1476
|
+
|
|
1477
|
+
# Calculate total_spans and error_count
|
|
1478
|
+
trace_spans = [s for s in spans if s.get("trace_id") == trace_data.get("trace_id")]
|
|
1479
|
+
trace_data["total_spans"] = len(trace_spans)
|
|
1480
|
+
trace_data["error_count"] = sum(1 for s in trace_spans if s.get("status_code") == "ERROR")
|
|
1481
|
+
|
|
1482
|
+
return Trace.from_dict(trace_data)
|
|
1483
|
+
|
|
1484
|
+
except Exception as e:
|
|
1485
|
+
log_error(f"Error getting trace: {e}")
|
|
1486
|
+
return None
|
|
1487
|
+
|
|
1488
|
+
def get_traces(
|
|
1489
|
+
self,
|
|
1490
|
+
run_id: Optional[str] = None,
|
|
1491
|
+
session_id: Optional[str] = None,
|
|
1492
|
+
user_id: Optional[str] = None,
|
|
1493
|
+
agent_id: Optional[str] = None,
|
|
1494
|
+
team_id: Optional[str] = None,
|
|
1495
|
+
workflow_id: Optional[str] = None,
|
|
1496
|
+
status: Optional[str] = None,
|
|
1497
|
+
start_time: Optional[datetime] = None,
|
|
1498
|
+
end_time: Optional[datetime] = None,
|
|
1499
|
+
limit: Optional[int] = 20,
|
|
1500
|
+
page: Optional[int] = 1,
|
|
1501
|
+
) -> tuple[List, int]:
|
|
1502
|
+
"""Get traces matching the provided filters with pagination.
|
|
1503
|
+
|
|
1504
|
+
Args:
|
|
1505
|
+
run_id: Filter by run ID.
|
|
1506
|
+
session_id: Filter by session ID.
|
|
1507
|
+
user_id: Filter by user ID.
|
|
1508
|
+
agent_id: Filter by agent ID.
|
|
1509
|
+
team_id: Filter by team ID.
|
|
1510
|
+
workflow_id: Filter by workflow ID.
|
|
1511
|
+
status: Filter by status (OK, ERROR, UNSET).
|
|
1512
|
+
start_time: Filter traces starting after this datetime.
|
|
1513
|
+
end_time: Filter traces ending before this datetime.
|
|
1514
|
+
limit: Maximum number of traces to return per page.
|
|
1515
|
+
page: Page number (1-indexed).
|
|
1516
|
+
|
|
1517
|
+
Returns:
|
|
1518
|
+
tuple[List[Trace], int]: Tuple of (list of matching traces, total count).
|
|
1519
|
+
"""
|
|
1520
|
+
try:
|
|
1521
|
+
from agno.tracing.schemas import Trace
|
|
1522
|
+
|
|
1523
|
+
traces = self._read_json_file(self.trace_table_name, create_table_if_not_found=False)
|
|
1524
|
+
if not traces:
|
|
1525
|
+
return [], 0
|
|
1526
|
+
|
|
1527
|
+
# Get spans for calculating total_spans and error_count
|
|
1528
|
+
spans = self._read_json_file(self.span_table_name, create_table_if_not_found=False)
|
|
1529
|
+
|
|
1530
|
+
# Apply filters
|
|
1531
|
+
filtered = []
|
|
1532
|
+
for t in traces:
|
|
1533
|
+
if run_id and t.get("run_id") != run_id:
|
|
1534
|
+
continue
|
|
1535
|
+
if session_id and t.get("session_id") != session_id:
|
|
1536
|
+
continue
|
|
1537
|
+
if user_id and t.get("user_id") != user_id:
|
|
1538
|
+
continue
|
|
1539
|
+
if agent_id and t.get("agent_id") != agent_id:
|
|
1540
|
+
continue
|
|
1541
|
+
if team_id and t.get("team_id") != team_id:
|
|
1542
|
+
continue
|
|
1543
|
+
if workflow_id and t.get("workflow_id") != workflow_id:
|
|
1544
|
+
continue
|
|
1545
|
+
if status and t.get("status") != status:
|
|
1546
|
+
continue
|
|
1547
|
+
if start_time:
|
|
1548
|
+
trace_start = t.get("start_time", "")
|
|
1549
|
+
if trace_start < start_time.isoformat():
|
|
1550
|
+
continue
|
|
1551
|
+
if end_time:
|
|
1552
|
+
trace_end = t.get("end_time", "")
|
|
1553
|
+
if trace_end > end_time.isoformat():
|
|
1554
|
+
continue
|
|
1555
|
+
filtered.append(t)
|
|
1556
|
+
|
|
1557
|
+
total_count = len(filtered)
|
|
1558
|
+
|
|
1559
|
+
# Sort by start_time desc
|
|
1560
|
+
filtered.sort(key=lambda x: x.get("start_time", ""), reverse=True)
|
|
1561
|
+
|
|
1562
|
+
# Apply pagination
|
|
1563
|
+
if limit and page:
|
|
1564
|
+
start_idx = (page - 1) * limit
|
|
1565
|
+
filtered = filtered[start_idx : start_idx + limit]
|
|
1566
|
+
|
|
1567
|
+
# Add total_spans and error_count to each trace
|
|
1568
|
+
result_traces = []
|
|
1569
|
+
for t in filtered:
|
|
1570
|
+
trace_spans = [s for s in spans if s.get("trace_id") == t.get("trace_id")]
|
|
1571
|
+
t["total_spans"] = len(trace_spans)
|
|
1572
|
+
t["error_count"] = sum(1 for s in trace_spans if s.get("status_code") == "ERROR")
|
|
1573
|
+
result_traces.append(Trace.from_dict(t))
|
|
1574
|
+
|
|
1575
|
+
return result_traces, total_count
|
|
1576
|
+
|
|
1577
|
+
except Exception as e:
|
|
1578
|
+
log_error(f"Error getting traces: {e}")
|
|
1579
|
+
return [], 0
|
|
1580
|
+
|
|
1581
|
+
def get_trace_stats(
|
|
1582
|
+
self,
|
|
1583
|
+
user_id: Optional[str] = None,
|
|
1584
|
+
agent_id: Optional[str] = None,
|
|
1585
|
+
team_id: Optional[str] = None,
|
|
1586
|
+
workflow_id: Optional[str] = None,
|
|
1587
|
+
start_time: Optional[datetime] = None,
|
|
1588
|
+
end_time: Optional[datetime] = None,
|
|
1589
|
+
limit: Optional[int] = 20,
|
|
1590
|
+
page: Optional[int] = 1,
|
|
1591
|
+
) -> tuple[List[Dict[str, Any]], int]:
|
|
1592
|
+
"""Get trace statistics grouped by session.
|
|
1593
|
+
|
|
1594
|
+
Args:
|
|
1595
|
+
user_id: Filter by user ID.
|
|
1596
|
+
agent_id: Filter by agent ID.
|
|
1597
|
+
team_id: Filter by team ID.
|
|
1598
|
+
workflow_id: Filter by workflow ID.
|
|
1599
|
+
start_time: Filter sessions with traces created after this datetime.
|
|
1600
|
+
end_time: Filter sessions with traces created before this datetime.
|
|
1601
|
+
limit: Maximum number of sessions to return per page.
|
|
1602
|
+
page: Page number (1-indexed).
|
|
1603
|
+
|
|
1604
|
+
Returns:
|
|
1605
|
+
tuple[List[Dict], int]: Tuple of (list of session stats dicts, total count).
|
|
1606
|
+
"""
|
|
1607
|
+
try:
|
|
1608
|
+
traces = self._read_json_file(self.trace_table_name, create_table_if_not_found=False)
|
|
1609
|
+
if not traces:
|
|
1610
|
+
return [], 0
|
|
1611
|
+
|
|
1612
|
+
# Group by session_id
|
|
1613
|
+
session_stats: Dict[str, Dict[str, Any]] = {}
|
|
1614
|
+
|
|
1615
|
+
for t in traces:
|
|
1616
|
+
session_id = t.get("session_id")
|
|
1617
|
+
if not session_id:
|
|
1618
|
+
continue
|
|
1619
|
+
|
|
1620
|
+
# Apply filters
|
|
1621
|
+
if user_id and t.get("user_id") != user_id:
|
|
1622
|
+
continue
|
|
1623
|
+
if agent_id and t.get("agent_id") != agent_id:
|
|
1624
|
+
continue
|
|
1625
|
+
if team_id and t.get("team_id") != team_id:
|
|
1626
|
+
continue
|
|
1627
|
+
if workflow_id and t.get("workflow_id") != workflow_id:
|
|
1628
|
+
continue
|
|
1629
|
+
|
|
1630
|
+
created_at = t.get("created_at", "")
|
|
1631
|
+
if start_time and created_at < start_time.isoformat():
|
|
1632
|
+
continue
|
|
1633
|
+
if end_time and created_at > end_time.isoformat():
|
|
1634
|
+
continue
|
|
1635
|
+
|
|
1636
|
+
if session_id not in session_stats:
|
|
1637
|
+
session_stats[session_id] = {
|
|
1638
|
+
"session_id": session_id,
|
|
1639
|
+
"user_id": t.get("user_id"),
|
|
1640
|
+
"agent_id": t.get("agent_id"),
|
|
1641
|
+
"team_id": t.get("team_id"),
|
|
1642
|
+
"workflow_id": t.get("workflow_id"),
|
|
1643
|
+
"total_traces": 0,
|
|
1644
|
+
"first_trace_at": created_at,
|
|
1645
|
+
"last_trace_at": created_at,
|
|
1646
|
+
}
|
|
1647
|
+
|
|
1648
|
+
session_stats[session_id]["total_traces"] += 1
|
|
1649
|
+
if created_at < session_stats[session_id]["first_trace_at"]:
|
|
1650
|
+
session_stats[session_id]["first_trace_at"] = created_at
|
|
1651
|
+
if created_at > session_stats[session_id]["last_trace_at"]:
|
|
1652
|
+
session_stats[session_id]["last_trace_at"] = created_at
|
|
1653
|
+
|
|
1654
|
+
stats_list = list(session_stats.values())
|
|
1655
|
+
total_count = len(stats_list)
|
|
1656
|
+
|
|
1657
|
+
# Sort by last_trace_at desc
|
|
1658
|
+
stats_list.sort(key=lambda x: x.get("last_trace_at", ""), reverse=True)
|
|
1659
|
+
|
|
1660
|
+
# Apply pagination
|
|
1661
|
+
if limit and page:
|
|
1662
|
+
start_idx = (page - 1) * limit
|
|
1663
|
+
stats_list = stats_list[start_idx : start_idx + limit]
|
|
1664
|
+
|
|
1665
|
+
# Convert ISO strings to datetime objects
|
|
1666
|
+
for stat in stats_list:
|
|
1667
|
+
first_at = stat.get("first_trace_at", "")
|
|
1668
|
+
last_at = stat.get("last_trace_at", "")
|
|
1669
|
+
if first_at:
|
|
1670
|
+
stat["first_trace_at"] = datetime.fromisoformat(first_at.replace("Z", "+00:00"))
|
|
1671
|
+
if last_at:
|
|
1672
|
+
stat["last_trace_at"] = datetime.fromisoformat(last_at.replace("Z", "+00:00"))
|
|
1673
|
+
|
|
1674
|
+
return stats_list, total_count
|
|
1675
|
+
|
|
1676
|
+
except Exception as e:
|
|
1677
|
+
log_error(f"Error getting trace stats: {e}")
|
|
1678
|
+
return [], 0
|
|
1679
|
+
|
|
1680
|
+
# --- Spans ---
|
|
1681
|
+
def create_span(self, span: "Span") -> None:
    """Persist a single span record in the JSON store.

    Args:
        span: The Span object to store.
    """
    try:
        table = self.span_table_name
        # Read-modify-write: load the current file contents, append, write back.
        records = self._read_json_file(table, create_table_if_not_found=True)
        records.append(span.to_dict())
        self._write_json_file(table, records)
    except Exception as e:
        log_error(f"Error creating span: {e}")
|
|
1694
|
+
|
|
1695
|
+
def create_spans(self, spans: List) -> None:
    """Persist a batch of spans with a single read/write cycle.

    Args:
        spans: List of Span objects to store.
    """
    # Nothing to do for an empty batch; avoid a pointless file rewrite.
    if not spans:
        return
    try:
        table = self.span_table_name
        records = self._read_json_file(table, create_table_if_not_found=True)
        records.extend(item.to_dict() for item in spans)
        self._write_json_file(table, records)
    except Exception as e:
        log_error(f"Error creating spans batch: {e}")
|
|
1712
|
+
|
|
1713
|
+
def get_span(self, span_id: str):
    """Fetch one span by its unique identifier.

    Args:
        span_id: The unique span identifier.

    Returns:
        Optional[Span]: The span if found, None otherwise.
    """
    try:
        # Imported lazily to avoid a hard dependency at module import time.
        from agno.tracing.schemas import Span

        rows = self._read_json_file(self.span_table_name, create_table_if_not_found=False)
        # First record whose span_id matches, or None if absent.
        match = next((row for row in rows if row.get("span_id") == span_id), None)
        return Span.from_dict(match) if match is not None else None
    except Exception as e:
        log_error(f"Error getting span: {e}")
        return None
|
|
1736
|
+
|
|
1737
|
+
def get_spans(
    self,
    trace_id: Optional[str] = None,
    parent_span_id: Optional[str] = None,
    limit: Optional[int] = 1000,
) -> List:
    """Fetch spans matching the provided filters.

    Args:
        trace_id: Filter by trace ID.
        parent_span_id: Filter by parent span ID.
        limit: Maximum number of spans to return.

    Returns:
        List[Span]: List of matching spans.
    """
    try:
        # Imported lazily to avoid a hard dependency at module import time.
        from agno.tracing.schemas import Span

        rows = self._read_json_file(self.span_table_name, create_table_if_not_found=False)
        if not rows:
            return []

        def _matches(row) -> bool:
            # A falsy filter value means "do not filter on this field".
            if trace_id and row.get("trace_id") != trace_id:
                return False
            if parent_span_id and row.get("parent_span_id") != parent_span_id:
                return False
            return True

        selected = [row for row in rows if _matches(row)]
        if limit:
            selected = selected[:limit]
        return [Span.from_dict(row) for row in selected]
    except Exception as e:
        log_error(f"Error getting spans: {e}")
        return []
|
|
1778
|
+
|
|
1779
|
+
# -- Learning methods (stubs) --
|
|
1780
|
+
def get_learning(
    self,
    learning_type: str,
    user_id: Optional[str] = None,
    agent_id: Optional[str] = None,
    team_id: Optional[str] = None,
    session_id: Optional[str] = None,
    namespace: Optional[str] = None,
    entity_id: Optional[str] = None,
    entity_type: Optional[str] = None,
) -> Optional[Dict[str, Any]]:
    """Fetch a single learning record matching the given filters.

    Not yet supported by the JSON backend; the signature mirrors the base
    interface so callers can switch backends without code changes.

    Raises:
        NotImplementedError: Always; learning storage is not implemented for JsonDb.
    """
    raise NotImplementedError("Learning methods not yet implemented for JsonDb")
|
|
1792
|
+
|
|
1793
|
+
def upsert_learning(
    self,
    id: str,
    learning_type: str,
    content: Dict[str, Any],
    user_id: Optional[str] = None,
    agent_id: Optional[str] = None,
    team_id: Optional[str] = None,
    session_id: Optional[str] = None,
    namespace: Optional[str] = None,
    entity_id: Optional[str] = None,
    entity_type: Optional[str] = None,
    metadata: Optional[Dict[str, Any]] = None,
) -> None:
    """Insert or update a learning record.

    Not yet supported by the JSON backend; the signature mirrors the base
    interface so callers can switch backends without code changes.

    Raises:
        NotImplementedError: Always; learning storage is not implemented for JsonDb.
    """
    raise NotImplementedError("Learning methods not yet implemented for JsonDb")
|
|
1808
|
+
|
|
1809
|
+
def delete_learning(self, id: str) -> bool:
    """Delete a learning record by its id.

    Not yet supported by the JSON backend.

    Raises:
        NotImplementedError: Always; learning storage is not implemented for JsonDb.
    """
    raise NotImplementedError("Learning methods not yet implemented for JsonDb")
|
|
1811
|
+
|
|
1812
|
+
def get_learnings(
    self,
    learning_type: Optional[str] = None,
    user_id: Optional[str] = None,
    agent_id: Optional[str] = None,
    team_id: Optional[str] = None,
    session_id: Optional[str] = None,
    namespace: Optional[str] = None,
    entity_id: Optional[str] = None,
    entity_type: Optional[str] = None,
    limit: Optional[int] = None,
) -> List[Dict[str, Any]]:
    """Fetch all learning records matching the given filters.

    Not yet supported by the JSON backend; the signature mirrors the base
    interface so callers can switch backends without code changes.

    Raises:
        NotImplementedError: Always; learning storage is not implemented for JsonDb.
    """
    raise NotImplementedError("Learning methods not yet implemented for JsonDb")
|
agno/db/json/utils.py
CHANGED
|
@@ -6,6 +6,7 @@ from typing import Any, Dict, List, Optional
|
|
|
6
6
|
from uuid import uuid4
|
|
7
7
|
|
|
8
8
|
from agno.db.schemas.culture import CulturalKnowledge
|
|
9
|
+
from agno.db.utils import get_sort_value
|
|
9
10
|
from agno.utils.log import log_debug
|
|
10
11
|
|
|
11
12
|
|
|
@@ -21,6 +22,9 @@ def apply_sorting(
|
|
|
21
22
|
|
|
22
23
|
Returns:
|
|
23
24
|
The sorted list
|
|
25
|
+
|
|
26
|
+
Note:
|
|
27
|
+
If sorting by "updated_at", will fallback to "created_at" in case of None.
|
|
24
28
|
"""
|
|
25
29
|
if sort_by is None or not data:
|
|
26
30
|
return data
|
|
@@ -31,8 +35,16 @@ def apply_sorting(
|
|
|
31
35
|
return data
|
|
32
36
|
|
|
33
37
|
try:
|
|
34
|
-
|
|
35
|
-
|
|
38
|
+
is_descending = sort_order != "asc" if sort_order else True
|
|
39
|
+
|
|
40
|
+
# Sort using the helper function that handles updated_at -> created_at fallback
|
|
41
|
+
sorted_records = sorted(
|
|
42
|
+
data,
|
|
43
|
+
key=lambda x: (get_sort_value(x, sort_by) is None, get_sort_value(x, sort_by)),
|
|
44
|
+
reverse=is_descending,
|
|
45
|
+
)
|
|
46
|
+
|
|
47
|
+
return sorted_records
|
|
36
48
|
except Exception as e:
|
|
37
49
|
log_debug(f"Error sorting data by '{sort_by}': {e}")
|
|
38
50
|
return data
|