agno-2.3.4-py3-none-any.whl → agno-2.3.5-py3-none-any.whl
This diff compares two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
- agno/agent/agent.py +177 -41
- agno/culture/manager.py +2 -2
- agno/db/base.py +330 -8
- agno/db/dynamo/dynamo.py +722 -2
- agno/db/dynamo/schemas.py +127 -0
- agno/db/firestore/firestore.py +573 -1
- agno/db/firestore/schemas.py +40 -0
- agno/db/gcs_json/gcs_json_db.py +446 -1
- agno/db/in_memory/in_memory_db.py +143 -1
- agno/db/json/json_db.py +438 -1
- agno/db/mongo/async_mongo.py +522 -0
- agno/db/mongo/mongo.py +523 -1
- agno/db/mongo/schemas.py +29 -0
- agno/db/mysql/mysql.py +536 -3
- agno/db/mysql/schemas.py +38 -0
- agno/db/postgres/async_postgres.py +541 -13
- agno/db/postgres/postgres.py +535 -2
- agno/db/postgres/schemas.py +38 -0
- agno/db/redis/redis.py +468 -1
- agno/db/redis/schemas.py +32 -0
- agno/db/singlestore/schemas.py +38 -0
- agno/db/singlestore/singlestore.py +523 -1
- agno/db/sqlite/async_sqlite.py +548 -9
- agno/db/sqlite/schemas.py +38 -0
- agno/db/sqlite/sqlite.py +537 -5
- agno/db/sqlite/utils.py +6 -8
- agno/db/surrealdb/models.py +25 -0
- agno/db/surrealdb/surrealdb.py +548 -1
- agno/eval/accuracy.py +10 -4
- agno/eval/performance.py +10 -4
- agno/eval/reliability.py +22 -13
- agno/exceptions.py +11 -0
- agno/hooks/__init__.py +3 -0
- agno/hooks/decorator.py +164 -0
- agno/knowledge/chunking/semantic.py +2 -2
- agno/models/aimlapi/aimlapi.py +2 -3
- agno/models/anthropic/claude.py +18 -13
- agno/models/aws/bedrock.py +3 -4
- agno/models/aws/claude.py +5 -1
- agno/models/azure/ai_foundry.py +2 -2
- agno/models/azure/openai_chat.py +8 -0
- agno/models/cerebras/cerebras.py +63 -11
- agno/models/cerebras/cerebras_openai.py +2 -3
- agno/models/cohere/chat.py +1 -5
- agno/models/cometapi/cometapi.py +2 -3
- agno/models/dashscope/dashscope.py +2 -3
- agno/models/deepinfra/deepinfra.py +2 -3
- agno/models/deepseek/deepseek.py +2 -3
- agno/models/fireworks/fireworks.py +2 -3
- agno/models/google/gemini.py +9 -7
- agno/models/groq/groq.py +2 -3
- agno/models/huggingface/huggingface.py +1 -5
- agno/models/ibm/watsonx.py +1 -5
- agno/models/internlm/internlm.py +2 -3
- agno/models/langdb/langdb.py +6 -4
- agno/models/litellm/chat.py +2 -2
- agno/models/litellm/litellm_openai.py +2 -3
- agno/models/meta/llama.py +1 -5
- agno/models/meta/llama_openai.py +4 -5
- agno/models/mistral/mistral.py +1 -5
- agno/models/nebius/nebius.py +2 -3
- agno/models/nvidia/nvidia.py +4 -5
- agno/models/openai/chat.py +14 -3
- agno/models/openai/responses.py +14 -3
- agno/models/openrouter/openrouter.py +4 -5
- agno/models/perplexity/perplexity.py +2 -3
- agno/models/portkey/portkey.py +7 -6
- agno/models/requesty/requesty.py +4 -5
- agno/models/response.py +2 -1
- agno/models/sambanova/sambanova.py +4 -5
- agno/models/siliconflow/siliconflow.py +3 -4
- agno/models/together/together.py +4 -5
- agno/models/vercel/v0.py +4 -5
- agno/models/vllm/vllm.py +19 -14
- agno/models/xai/xai.py +4 -5
- agno/os/app.py +104 -0
- agno/os/config.py +13 -0
- agno/os/interfaces/whatsapp/router.py +0 -1
- agno/os/mcp.py +1 -0
- agno/os/router.py +31 -0
- agno/os/routers/traces/__init__.py +3 -0
- agno/os/routers/traces/schemas.py +414 -0
- agno/os/routers/traces/traces.py +499 -0
- agno/os/schema.py +10 -1
- agno/os/utils.py +57 -0
- agno/run/agent.py +1 -0
- agno/run/base.py +17 -0
- agno/run/team.py +4 -0
- agno/session/team.py +1 -0
- agno/table.py +10 -0
- agno/team/team.py +214 -65
- agno/tools/function.py +10 -8
- agno/tools/nano_banana.py +1 -1
- agno/tracing/__init__.py +12 -0
- agno/tracing/exporter.py +157 -0
- agno/tracing/schemas.py +276 -0
- agno/tracing/setup.py +111 -0
- agno/utils/agent.py +4 -4
- agno/utils/hooks.py +56 -1
- agno/vectordb/qdrant/qdrant.py +22 -22
- agno/workflow/condition.py +8 -0
- agno/workflow/loop.py +8 -0
- agno/workflow/parallel.py +8 -0
- agno/workflow/router.py +8 -0
- agno/workflow/step.py +20 -0
- agno/workflow/steps.py +8 -0
- agno/workflow/workflow.py +83 -17
- {agno-2.3.4.dist-info → agno-2.3.5.dist-info}/METADATA +2 -2
- {agno-2.3.4.dist-info → agno-2.3.5.dist-info}/RECORD +112 -102
- {agno-2.3.4.dist-info → agno-2.3.5.dist-info}/WHEEL +0 -0
- {agno-2.3.4.dist-info → agno-2.3.5.dist-info}/licenses/LICENSE +0 -0
- {agno-2.3.4.dist-info → agno-2.3.5.dist-info}/top_level.txt +0 -0
agno/tools/function.py
CHANGED
@@ -897,7 +897,7 @@ class FunctionCall(BaseModel):
             return FunctionExecutionResult(status="success", result=cached_result)

         # Execute function
-        execution_result
+        execution_result: FunctionExecutionResult
         exception_to_raise = None

         try:
@@ -939,6 +939,7 @@ class FunctionCall(BaseModel):
             log_debug(f"{e.__class__.__name__}: {e}")
             self.error = str(e)
             exception_to_raise = e
+            execution_result = FunctionExecutionResult(status="failure", error=str(e))
         except Exception as e:
             log_warning(f"Could not run function {self.get_call_str()}")
             log_exception(e)
@@ -948,10 +949,10 @@ class FunctionCall(BaseModel):
         finally:
             self._handle_post_hook()

-
-
+        if exception_to_raise is not None:
+            raise exception_to_raise

-
+        return execution_result

     async def _handle_pre_hook_async(self):
         """Handles the async pre-hook for the function call."""
@@ -1117,7 +1118,7 @@ class FunctionCall(BaseModel):
             return FunctionExecutionResult(status="success", result=cached_result)

         # Execute function
-        execution_result
+        execution_result: FunctionExecutionResult
         exception_to_raise = None

         try:
@@ -1159,6 +1160,7 @@ class FunctionCall(BaseModel):
             log_debug(f"{e.__class__.__name__}: {e}")
             self.error = str(e)
             exception_to_raise = e
+            execution_result = FunctionExecutionResult(status="failure", error=str(e))
         except Exception as e:
             log_warning(f"Could not run function {self.get_call_str()}")
             log_exception(e)
@@ -1171,10 +1173,10 @@ class FunctionCall(BaseModel):
         else:
             self._handle_post_hook()

-
-
+        if exception_to_raise is not None:
+            raise exception_to_raise

-
+        return execution_result


 class ToolResult(BaseModel):
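Note: the hunks above make the tool-execution result explicit: `execution_result` is annotated up front, a handled tool exception now also records a `FunctionExecutionResult(status="failure", ...)`, and the deferred exception is re-raised before the result would be returned. The sketch below is a minimal standalone illustration of that control-flow pattern, not agno's actual API; `ExecutionResult`, `StopRun`, and `run_tool` are illustrative stand-ins.

```python
# Minimal sketch of the control-flow pattern introduced in 2.3.5
# (names are illustrative stand-ins, not agno's API).
from dataclasses import dataclass
from typing import Any, Callable, Optional


@dataclass
class ExecutionResult:
    status: str
    result: Any = None
    error: Optional[str] = None


class StopRun(Exception):
    """Stand-in for a control-flow exception that must propagate to the caller."""


def run_tool(fn: Callable[..., Any], *args: Any) -> ExecutionResult:
    execution_result: ExecutionResult  # declared up front so every non-raising path defines it
    exception_to_raise = None
    try:
        execution_result = ExecutionResult(status="success", result=fn(*args))
    except StopRun as e:
        # Record an explicit failure result *and* defer the exception, as the patch does.
        exception_to_raise = e
        execution_result = ExecutionResult(status="failure", error=str(e))
    except Exception as e:
        execution_result = ExecutionResult(status="failure", error=str(e))
    finally:
        pass  # post-hooks run here in the real code

    if exception_to_raise is not None:
        raise exception_to_raise
    return execution_result


print(run_tool(lambda x: x * 2, 21).status)  # success


def failing_tool() -> None:
    raise StopRun("halt requested")


try:
    run_tool(failing_tool)
except StopRun as e:
    print(f"re-raised after recording the failure result: {e}")
```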
agno/tools/nano_banana.py
CHANGED
agno/tracing/__init__.py
ADDED
@@ -0,0 +1,12 @@
+"""
+Agno Tracing Module
+
+This module provides OpenTelemetry-based tracing capabilities for Agno agents.
+It uses the openinference-instrumentation-agno package for automatic instrumentation
+and provides a custom DatabaseSpanExporter to store traces in the Agno database.
+"""
+
+from agno.tracing.exporter import DatabaseSpanExporter
+from agno.tracing.setup import setup_tracing
+
+__all__ = ["DatabaseSpanExporter", "setup_tracing"]
agno/tracing/exporter.py
ADDED
@@ -0,0 +1,157 @@
+"""
+Custom OpenTelemetry SpanExporter that writes traces to Agno database.
+"""
+
+import asyncio
+from collections import defaultdict
+from typing import Dict, List, Sequence, Union
+
+from opentelemetry.sdk.trace import ReadableSpan  # type: ignore
+from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult  # type: ignore
+
+from agno.db.base import AsyncBaseDb, BaseDb
+from agno.tracing.schemas import Span, create_trace_from_spans
+from agno.utils.log import logger
+
+
+class DatabaseSpanExporter(SpanExporter):
+    """Custom OpenTelemetry SpanExporter that writes to Agno database"""
+
+    def __init__(self, db: Union[BaseDb, AsyncBaseDb]):
+        """
+        Initialize the DatabaseSpanExporter.
+
+        Args:
+            db: Database instance (sync or async) to store traces
+        """
+        self.db = db
+        self._shutdown = False
+
+    def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:
+        """
+        Export spans to the database.
+
+        This method:
+        1. Converts OpenTelemetry spans to Span objects
+        2. Groups spans by trace_id
+        3. Creates Trace records (one per trace_id)
+        4. Creates Span records (multiple per trace_id)
+
+        Args:
+            spans: Sequence of OpenTelemetry ReadableSpan objects
+
+        Returns:
+            SpanExportResult indicating success or failure
+        """
+        if self._shutdown:
+            logger.warning("DatabaseSpanExporter is shutdown, cannot export spans")
+            return SpanExportResult.FAILURE
+
+        if not spans:
+            return SpanExportResult.SUCCESS
+
+        try:
+            # Convert OpenTelemetry spans to Span objects
+            converted_spans: List[Span] = []
+            for span in spans:
+                try:
+                    converted_span = Span.from_otel_span(span)
+                    converted_spans.append(converted_span)
+                except Exception as e:
+                    logger.error(f"Failed to convert span {span.name}: {e}")
+                    # Continue processing other spans
+                    continue
+
+            if not converted_spans:
+                return SpanExportResult.SUCCESS
+
+            # Group spans by trace_id
+            spans_by_trace: Dict[str, List[Span]] = defaultdict(list)
+            for converted_span in converted_spans:
+                spans_by_trace[converted_span.trace_id].append(converted_span)
+
+            # Handle async DB
+            if isinstance(self.db, AsyncBaseDb):
+                self._export_async(spans_by_trace)
+            else:
+                # Synchronous database
+                self._export_sync(spans_by_trace)
+
+            return SpanExportResult.SUCCESS
+        except Exception as e:
+            logger.error(f"Failed to export spans to database: {e}", exc_info=True)
+            return SpanExportResult.FAILURE
+
+    def _export_sync(self, spans_by_trace: Dict[str, List[Span]]) -> None:
+        """Export traces and spans to synchronous database"""
+        try:
+            # Create trace and span records for each trace
+            for trace_id, spans in spans_by_trace.items():
+                # Create trace record (aggregate of all spans)
+                trace = create_trace_from_spans(spans)
+                if trace:
+                    self.db.create_trace(trace)
+
+                # Create span records
+                self.db.create_spans(spans)
+
+        except Exception as e:
+            logger.error(f"Failed to export sync traces: {e}", exc_info=True)
+            raise
+
+    def _export_async(self, spans_by_trace: Dict[str, List[Span]]) -> None:
+        """Handle async database export"""
+        try:
+            loop = asyncio.get_event_loop()
+            if loop.is_running():
+                # We're in an async context, schedule the coroutine
+                asyncio.create_task(self._do_async_export(spans_by_trace))
+            else:
+                # No running loop, run in new loop
+                loop.run_until_complete(self._do_async_export(spans_by_trace))
+        except RuntimeError:
+            # No event loop, create new one
+            try:
+                asyncio.run(self._do_async_export(spans_by_trace))
+            except Exception as e:
+                logger.error(f"Failed to export async traces: {e}", exc_info=True)
+
+    async def _do_async_export(self, spans_by_trace: Dict[str, List[Span]]) -> None:
+        """Actually perform the async export"""
+        try:
+            # Create trace and span records for each trace
+            for trace_id, spans in spans_by_trace.items():
+                # Create trace record (aggregate of all spans)
+                trace = create_trace_from_spans(spans)
+                if trace:
+                    create_trace_result = self.db.create_trace(trace)
+                    if create_trace_result is not None:
+                        await create_trace_result
+
+                # Create span records
+                create_spans_result = self.db.create_spans(spans)
+                if create_spans_result is not None:
+                    await create_spans_result
+
+        except Exception as e:
+            logger.error(f"Failed to do async export: {e}", exc_info=True)
+            raise
+
+    def shutdown(self) -> None:
+        """Shutdown the exporter"""
+        self._shutdown = True
+        logger.debug("DatabaseSpanExporter shutdown")
+
+    def force_flush(self, timeout_millis: int = 30000) -> bool:
+        """
+        Force flush any pending spans.
+
+        Since we write immediately to the database, this is a no-op.
+
+        Args:
+            timeout_millis: Timeout in milliseconds
+
+        Returns:
+            True if flush was successful
+        """
+        return True
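Note: `setup_tracing` (see agno/tracing/setup.py below) is the intended entry point, but the exporter is a standard OpenTelemetry `SpanExporter`, so it can also be wired by hand. The sketch below mirrors the non-batch path of `setup_tracing`; the `SqliteDb` usage is taken from the `setup_tracing` docstring, and the tracer and span names are illustrative.

```python
# Manual wiring of DatabaseSpanExporter into an OpenTelemetry pipeline,
# mirroring the SimpleSpanProcessor path that setup_tracing() configures.
from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import SimpleSpanProcessor

from agno.db.sqlite import SqliteDb
from agno.tracing.exporter import DatabaseSpanExporter

db = SqliteDb(db_file="tmp/traces.db")

provider = TracerProvider()
provider.add_span_processor(SimpleSpanProcessor(DatabaseSpanExporter(db=db)))
trace.set_tracer_provider(provider)

# Every span ended from here on is converted to agno's Span/Trace records
# and written through db.create_trace / db.create_spans.
tracer = trace.get_tracer("example")
with tracer.start_as_current_span("manual-span"):
    pass
```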
agno/tracing/schemas.py
ADDED
@@ -0,0 +1,276 @@
+"""
+Trace data models for Agno tracing.
+"""
+
+from dataclasses import asdict, dataclass
+from datetime import datetime, timezone
+from typing import Any, Dict, List, Optional
+
+from opentelemetry.sdk.trace import ReadableSpan  # type: ignore
+from opentelemetry.trace import SpanKind, StatusCode  # type: ignore
+
+
+@dataclass
+class Trace:
+    """Represents a complete trace (one record per trace_id)"""
+
+    trace_id: str
+    name: str  # Name from root span
+    status: str  # Overall status: OK, ERROR, UNSET
+    start_time: datetime  # Python datetime object
+    end_time: datetime  # Python datetime object
+    duration_ms: int
+    total_spans: int
+    error_count: int
+
+    # Context from root span
+    run_id: Optional[str]
+    session_id: Optional[str]
+    user_id: Optional[str]
+    agent_id: Optional[str]
+    team_id: Optional[str]
+    workflow_id: Optional[str]
+
+    created_at: datetime  # Python datetime object
+
+    def to_dict(self) -> Dict[str, Any]:
+        """Convert Trace to dictionary for database storage (datetime -> ISO string)"""
+        data = asdict(self)
+        # Convert datetime objects to ISO format strings for database storage
+        data["start_time"] = self.start_time.isoformat()
+        data["end_time"] = self.end_time.isoformat()
+        data["created_at"] = self.created_at.isoformat()
+        return data
+
+    @classmethod
+    def from_dict(cls, data: Dict[str, Any]) -> "Trace":
+        """Create Trace from dictionary (ISO string -> datetime)"""
+        # Convert ISO format strings to datetime objects
+        start_time = data["start_time"]
+        if isinstance(start_time, str):
+            start_time = datetime.fromisoformat(start_time.replace("Z", "+00:00"))
+        elif isinstance(start_time, int):
+            start_time = datetime.fromtimestamp(start_time / 1_000_000_000, tz=timezone.utc)
+
+        end_time = data["end_time"]
+        if isinstance(end_time, str):
+            end_time = datetime.fromisoformat(end_time.replace("Z", "+00:00"))
+        elif isinstance(end_time, int):
+            end_time = datetime.fromtimestamp(end_time / 1_000_000_000, tz=timezone.utc)
+
+        created_at = data["created_at"]
+        if isinstance(created_at, str):
+            created_at = datetime.fromisoformat(created_at.replace("Z", "+00:00"))
+        elif isinstance(created_at, int):
+            created_at = datetime.fromtimestamp(created_at, tz=timezone.utc)
+
+        return cls(
+            trace_id=data["trace_id"],
+            name=data["name"],
+            status=data["status"],
+            start_time=start_time,
+            end_time=end_time,
+            duration_ms=data["duration_ms"],
+            total_spans=data["total_spans"],
+            error_count=data["error_count"],
+            run_id=data.get("run_id"),
+            session_id=data.get("session_id"),
+            user_id=data.get("user_id"),
+            agent_id=data.get("agent_id"),
+            team_id=data.get("team_id"),
+            workflow_id=data.get("workflow_id"),
+            created_at=created_at,
+        )
+
+
+@dataclass
+class Span:
+    """Represents a single span within a trace"""
+
+    span_id: str
+    trace_id: str
+    parent_span_id: Optional[str]
+    name: str
+    span_kind: str
+    status_code: str
+    status_message: Optional[str]
+    start_time: datetime  # Python datetime object
+    end_time: datetime  # Python datetime object
+    duration_ms: int
+    attributes: Dict[str, Any]
+    created_at: datetime  # Python datetime object
+
+    def to_dict(self) -> Dict[str, Any]:
+        """Convert Span to dictionary for database storage (datetime -> ISO string)"""
+        data = asdict(self)
+        # Convert datetime objects to ISO format strings for database storage
+        data["start_time"] = self.start_time.isoformat()
+        data["end_time"] = self.end_time.isoformat()
+        data["created_at"] = self.created_at.isoformat()
+        return data
+
+    @classmethod
+    def from_dict(cls, data: Dict[str, Any]) -> "Span":
+        """Create Span from dictionary (ISO string -> datetime)"""
+        # Convert ISO format strings to datetime objects
+        start_time = data["start_time"]
+        if isinstance(start_time, str):
+            start_time = datetime.fromisoformat(start_time.replace("Z", "+00:00"))
+        elif isinstance(start_time, int):
+            start_time = datetime.fromtimestamp(start_time / 1_000_000_000, tz=timezone.utc)
+
+        end_time = data["end_time"]
+        if isinstance(end_time, str):
+            end_time = datetime.fromisoformat(end_time.replace("Z", "+00:00"))
+        elif isinstance(end_time, int):
+            end_time = datetime.fromtimestamp(end_time / 1_000_000_000, tz=timezone.utc)
+
+        created_at = data["created_at"]
+        if isinstance(created_at, str):
+            created_at = datetime.fromisoformat(created_at.replace("Z", "+00:00"))
+        elif isinstance(created_at, int):
+            created_at = datetime.fromtimestamp(created_at, tz=timezone.utc)
+
+        return cls(
+            span_id=data["span_id"],
+            trace_id=data["trace_id"],
+            parent_span_id=data.get("parent_span_id"),
+            name=data["name"],
+            span_kind=data["span_kind"],
+            status_code=data["status_code"],
+            status_message=data.get("status_message"),
+            start_time=start_time,
+            end_time=end_time,
+            duration_ms=data["duration_ms"],
+            attributes=data.get("attributes", {}),
+            created_at=created_at,
+        )
+
+    @classmethod
+    def from_otel_span(cls, otel_span: ReadableSpan) -> "Span":
+        """Convert OpenTelemetry ReadableSpan to Span"""
+        # Extract span context
+        span_context = otel_span.context
+        trace_id = format(span_context.trace_id, "032x") if span_context else "0" * 32
+        span_id = format(span_context.span_id, "016x") if span_context else "0" * 16
+
+        # Extract parent span ID if exists
+        parent_span_id = None
+        if otel_span.parent and otel_span.parent.span_id:
+            parent_span_id = format(otel_span.parent.span_id, "016x")
+
+        # Extract span kind
+        span_kind_map = {
+            SpanKind.INTERNAL: "INTERNAL",
+            SpanKind.SERVER: "SERVER",
+            SpanKind.CLIENT: "CLIENT",
+            SpanKind.PRODUCER: "PRODUCER",
+            SpanKind.CONSUMER: "CONSUMER",
+        }
+        span_kind = span_kind_map.get(otel_span.kind, "INTERNAL")
+
+        # Extract status
+        status_code_map = {
+            StatusCode.UNSET: "UNSET",
+            StatusCode.OK: "OK",
+            StatusCode.ERROR: "ERROR",
+        }
+        status_code = status_code_map.get(otel_span.status.status_code, "UNSET")
+        status_message = otel_span.status.description
+
+        # Calculate duration in milliseconds
+        start_time_ns = otel_span.start_time or 0
+        end_time_ns = otel_span.end_time or start_time_ns
+        duration_ms = int((end_time_ns - start_time_ns) / 1_000_000)
+
+        # Convert nanosecond timestamps to datetime objects
+        start_time = datetime.fromtimestamp(start_time_ns / 1_000_000_000, tz=timezone.utc)
+        end_time = datetime.fromtimestamp(end_time_ns / 1_000_000_000, tz=timezone.utc)
+
+        # Convert attributes to dictionary
+        attributes: Dict[str, Any] = {}
+        if otel_span.attributes:
+            for key, value in otel_span.attributes.items():
+                # Convert attribute values to JSON-serializable types
+                if isinstance(value, (str, int, float, bool, type(None))):
+                    attributes[key] = value
+                elif isinstance(value, (list, tuple)):
+                    attributes[key] = list(value)
+                else:
+                    attributes[key] = str(value)
+
+        return cls(
+            span_id=span_id,
+            trace_id=trace_id,
+            parent_span_id=parent_span_id,
+            name=otel_span.name,
+            span_kind=span_kind,
+            status_code=status_code,
+            status_message=status_message,
+            start_time=start_time,
+            end_time=end_time,
+            duration_ms=duration_ms,
+            attributes=attributes,
+            created_at=datetime.now(timezone.utc),
+        )
+
+
+def create_trace_from_spans(spans: List[Span]) -> Optional[Trace]:
+    """
+    Create a Trace object from a list of Span objects with the same trace_id.
+
+    Args:
+        spans: List of Span objects belonging to the same trace
+
+    Returns:
+        Trace object with aggregated information, or None if spans list is empty
+    """
+    if not spans:
+        return None
+
+    # Find root span (no parent)
+    root_span = next((s for s in spans if not s.parent_span_id), spans[0])
+
+    # Calculate aggregated metrics
+    trace_id = spans[0].trace_id
+    start_time = min(s.start_time for s in spans)
+    end_time = max(s.end_time for s in spans)
+    duration_ms = int((end_time - start_time).total_seconds() * 1000)
+    total_spans = len(spans)
+    error_count = sum(1 for s in spans if s.status_code == "ERROR")
+
+    # Determine overall status (ERROR if any span errored, OK otherwise)
+    status = "ERROR" if error_count > 0 else "OK"
+
+    # Extract context from root span's attributes
+    attrs = root_span.attributes
+    run_id = attrs.get("run_id") or attrs.get("agno.run.id")
+
+    session_id = attrs.get("session_id") or attrs.get("agno.session.id") or attrs.get("session.id")
+
+    user_id = attrs.get("user_id") or attrs.get("agno.user.id") or attrs.get("user.id")
+
+    # Try to extract agent_id from the span name or attributes
+    agent_id = attrs.get("agent_id") or attrs.get("agno.agent.id")
+
+    team_id = attrs.get("team_id") or attrs.get("agno.team.id")
+
+    workflow_id = attrs.get("workflow_id") or attrs.get("agno.workflow.id")
+
+    return Trace(
+        trace_id=trace_id,
+        name=root_span.name,
+        status=status,
+        start_time=start_time,
+        end_time=end_time,
+        duration_ms=duration_ms,
+        total_spans=total_spans,
+        error_count=error_count,
+        run_id=run_id,
+        session_id=session_id,
+        user_id=user_id,
+        agent_id=agent_id,
+        team_id=team_id,
+        workflow_id=workflow_id,
+        created_at=datetime.now(timezone.utc),
+    )
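Note: the sketch below shows how `create_trace_from_spans` aggregates `Span` records into a `Trace`, using the dataclass fields defined above. All IDs, span names, and attribute values are made-up example data.

```python
# Aggregating Span records into a Trace with the schemas added in 2.3.5.
from datetime import datetime, timedelta, timezone

from agno.tracing.schemas import Span, create_trace_from_spans

now = datetime.now(timezone.utc)

root = Span(
    span_id="a" * 16,
    trace_id="b" * 32,
    parent_span_id=None,  # no parent -> treated as the root span
    name="Agent.run",
    span_kind="INTERNAL",
    status_code="OK",
    status_message=None,
    start_time=now,
    end_time=now + timedelta(milliseconds=120),
    duration_ms=120,
    attributes={"agno.agent.id": "agent-123", "session.id": "sess-1"},
    created_at=now,
)
child = Span(
    span_id="c" * 16,
    trace_id="b" * 32,
    parent_span_id="a" * 16,
    name="Model.response",
    span_kind="CLIENT",
    status_code="ERROR",
    status_message="timeout",
    start_time=now,
    end_time=now + timedelta(milliseconds=80),
    duration_ms=80,
    attributes={},
    created_at=now,
)

trace = create_trace_from_spans([root, child])
assert trace is not None
# The trace takes its name and context from the root span and aggregates status:
# a single errored span marks the whole trace as ERROR.
print(trace.name, trace.status, trace.total_spans, trace.error_count)  # Agent.run ERROR 2 1
print(trace.agent_id, trace.session_id)  # agent-123 sess-1
```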
agno/tracing/setup.py
ADDED
@@ -0,0 +1,111 @@
+"""
+Setup helper functions for configuring Agno tracing.
+"""
+
+from typing import Union
+
+from agno.db.base import AsyncBaseDb, BaseDb
+from agno.tracing.exporter import DatabaseSpanExporter
+from agno.utils.log import logger
+
+try:
+    from openinference.instrumentation.agno import AgnoInstrumentor  # type: ignore
+    from opentelemetry import trace as trace_api  # type: ignore
+    from opentelemetry.sdk.trace import TracerProvider  # type: ignore
+    from opentelemetry.sdk.trace.export import BatchSpanProcessor, SimpleSpanProcessor, SpanProcessor  # type: ignore
+
+    OPENTELEMETRY_AVAILABLE = True
+except ImportError:
+    OPENTELEMETRY_AVAILABLE = False
+
+
+def setup_tracing(
+    db: Union[BaseDb, AsyncBaseDb],
+    batch_processing: bool = False,
+    max_queue_size: int = 2048,
+    max_export_batch_size: int = 512,
+    schedule_delay_millis: int = 5000,
+) -> None:
+    """
+    Set up OpenTelemetry tracing with database export for Agno agents.
+
+    This function configures automatic tracing for all Agno agents, teams, and workflows.
+    Traces are automatically captured for:
+    - Agent runs (agent.run, agent.arun)
+    - Model calls (model.response)
+    - Tool executions
+    - Team coordination
+    - Workflow steps
+
+    Args:
+        db: Database instance to store traces (sync or async)
+        batch_processing: If True, use BatchSpanProcessor for better performance.
+            If False, use SimpleSpanProcessor (immediate export)
+        max_queue_size: Maximum queue size for batch processor
+        max_export_batch_size: Maximum batch size for export
+        schedule_delay_millis: Delay in milliseconds between batch exports
+
+    Raises:
+        ImportError: If OpenTelemetry packages are not installed
+
+    Example:
+        ```python
+        from agno.db.sqlite import SqliteDb
+        from agno.tracing import setup_tracing
+
+        db = SqliteDb(db_file="tmp/traces.db")
+        setup_tracing(db=db)
+
+        # Now all agents will be automatically traced
+        agent = Agent(...)
+        agent.run("Hello")  # This will be traced automatically
+        ```
+    """
+    if not OPENTELEMETRY_AVAILABLE:
+        raise ImportError(
+            "OpenTelemetry packages are required for tracing. "
+            "Install with: pip install opentelemetry-api opentelemetry-sdk openinference-instrumentation-agno"
+        )
+
+    # Check if tracing is already set up (handles reload scenarios)
+    current_provider = trace_api.get_tracer_provider()
+    if isinstance(current_provider, TracerProvider):
+        # Already configured with a real TracerProvider, skip
+        return
+
+    try:
+        # Create tracer provider
+        tracer_provider = TracerProvider()
+
+        # Create database exporter
+        exporter = DatabaseSpanExporter(db=db)
+
+        # Configure span processor
+        processor: SpanProcessor
+        if batch_processing:
+            processor = BatchSpanProcessor(
+                exporter,
+                max_queue_size=max_queue_size,
+                max_export_batch_size=max_export_batch_size,
+                schedule_delay_millis=schedule_delay_millis,
+            )
+            logger.debug(
+                f"Tracing configured with BatchSpanProcessor "
+                f"(queue_size={max_queue_size}, batch_size={max_export_batch_size})"
+            )
+        else:
+            processor = SimpleSpanProcessor(exporter)
+            logger.debug("Tracing configured with SimpleSpanProcessor")
+
+        tracer_provider.add_span_processor(processor)
+
+        # Set the global tracer provider
+        trace_api.set_tracer_provider(tracer_provider)
+
+        # Instrument Agno with OpenInference
+        AgnoInstrumentor().instrument(tracer_provider=tracer_provider)
+
+        logger.info("Agno tracing successfully set up with database storage")
+    except Exception as e:
+        logger.error(f"Failed to set up tracing: {e}", exc_info=True)
+        raise
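Note: the docstring example above uses the default immediate-export path (`SimpleSpanProcessor`). The sketch below enables the batched path instead, using the parameters from the `setup_tracing` signature; the specific values are illustrative.

```python
# Batched export: spans are queued and flushed on a schedule instead of
# being written to the database as each span ends.
from agno.db.sqlite import SqliteDb
from agno.tracing import setup_tracing

db = SqliteDb(db_file="tmp/traces.db")
setup_tracing(
    db=db,
    batch_processing=True,       # use BatchSpanProcessor
    max_queue_size=1024,
    max_export_batch_size=256,
    schedule_delay_millis=2000,  # flush roughly every 2 seconds
)
```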
agno/utils/agent.py
CHANGED
@@ -24,7 +24,7 @@ if TYPE_CHECKING:
     from agno.team.team import Team


-async def await_for_background_tasks(
+async def await_for_open_threads(
     memory_task: Optional[Task] = None,
     cultural_knowledge_task: Optional[Task] = None,
 ) -> None:
@@ -41,7 +41,7 @@ async def await_for_background_tasks(
             log_warning(f"Error in cultural knowledge creation: {str(e)}")


-def wait_for_background_tasks(
+def wait_for_open_threads(
     memory_future: Optional[Future] = None, cultural_knowledge_future: Optional[Future] = None
 ) -> None:
     if memory_future is not None:
@@ -58,7 +58,7 @@ def wait_for_background_tasks(
             log_warning(f"Error in cultural knowledge creation: {str(e)}")


-async def await_for_background_tasks_stream(
+async def await_for_thread_tasks_stream(
     run_response: Union[RunOutput, TeamRunOutput],
     memory_task: Optional[Task] = None,
     cultural_knowledge_task: Optional[Task] = None,
@@ -109,7 +109,7 @@ async def await_for_background_tasks_stream(
             log_warning(f"Error in cultural knowledge creation: {str(e)}")


-def wait_for_background_tasks_stream(
+def wait_for_thread_tasks_stream(
     run_response: Union[TeamRunOutput, RunOutput],
     memory_future: Optional[Future] = None,
     cultural_knowledge_future: Optional[Future] = None,