agno 2.3.4__py3-none-any.whl → 2.3.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/agent.py +184 -45
- agno/culture/manager.py +2 -2
- agno/db/base.py +330 -8
- agno/db/dynamo/dynamo.py +722 -2
- agno/db/dynamo/schemas.py +127 -0
- agno/db/firestore/firestore.py +573 -1
- agno/db/firestore/schemas.py +40 -0
- agno/db/gcs_json/gcs_json_db.py +446 -1
- agno/db/in_memory/in_memory_db.py +143 -1
- agno/db/json/json_db.py +438 -1
- agno/db/mongo/async_mongo.py +522 -0
- agno/db/mongo/mongo.py +523 -1
- agno/db/mongo/schemas.py +29 -0
- agno/db/mysql/mysql.py +536 -3
- agno/db/mysql/schemas.py +38 -0
- agno/db/postgres/async_postgres.py +553 -15
- agno/db/postgres/postgres.py +544 -5
- agno/db/postgres/schemas.py +38 -0
- agno/db/redis/redis.py +468 -1
- agno/db/redis/schemas.py +32 -0
- agno/db/singlestore/schemas.py +38 -0
- agno/db/singlestore/singlestore.py +523 -1
- agno/db/sqlite/async_sqlite.py +549 -10
- agno/db/sqlite/schemas.py +38 -0
- agno/db/sqlite/sqlite.py +540 -9
- agno/db/sqlite/utils.py +6 -8
- agno/db/surrealdb/models.py +25 -0
- agno/db/surrealdb/surrealdb.py +548 -1
- agno/eval/accuracy.py +18 -8
- agno/eval/performance.py +10 -4
- agno/eval/reliability.py +22 -13
- agno/exceptions.py +11 -0
- agno/hooks/__init__.py +3 -0
- agno/hooks/decorator.py +164 -0
- agno/integrations/discord/client.py +1 -1
- agno/knowledge/chunking/semantic.py +2 -2
- agno/models/aimlapi/aimlapi.py +2 -3
- agno/models/anthropic/claude.py +18 -13
- agno/models/aws/bedrock.py +3 -4
- agno/models/aws/claude.py +5 -1
- agno/models/azure/ai_foundry.py +2 -2
- agno/models/azure/openai_chat.py +8 -0
- agno/models/cerebras/cerebras.py +62 -11
- agno/models/cerebras/cerebras_openai.py +2 -3
- agno/models/cohere/chat.py +1 -5
- agno/models/cometapi/cometapi.py +2 -3
- agno/models/dashscope/dashscope.py +2 -3
- agno/models/deepinfra/deepinfra.py +2 -3
- agno/models/deepseek/deepseek.py +2 -3
- agno/models/fireworks/fireworks.py +2 -3
- agno/models/google/gemini.py +9 -7
- agno/models/groq/groq.py +2 -3
- agno/models/huggingface/huggingface.py +1 -5
- agno/models/ibm/watsonx.py +1 -5
- agno/models/internlm/internlm.py +2 -3
- agno/models/langdb/langdb.py +6 -4
- agno/models/litellm/chat.py +2 -2
- agno/models/litellm/litellm_openai.py +2 -3
- agno/models/meta/llama.py +1 -5
- agno/models/meta/llama_openai.py +4 -5
- agno/models/mistral/mistral.py +1 -5
- agno/models/nebius/nebius.py +2 -3
- agno/models/nvidia/nvidia.py +4 -5
- agno/models/openai/chat.py +14 -3
- agno/models/openai/responses.py +14 -3
- agno/models/openrouter/openrouter.py +4 -5
- agno/models/perplexity/perplexity.py +2 -3
- agno/models/portkey/portkey.py +7 -6
- agno/models/requesty/requesty.py +4 -5
- agno/models/response.py +2 -1
- agno/models/sambanova/sambanova.py +4 -5
- agno/models/siliconflow/siliconflow.py +3 -4
- agno/models/together/together.py +4 -5
- agno/models/vercel/v0.py +4 -5
- agno/models/vllm/vllm.py +19 -14
- agno/models/xai/xai.py +4 -5
- agno/os/app.py +104 -0
- agno/os/config.py +13 -0
- agno/os/interfaces/whatsapp/router.py +0 -1
- agno/os/interfaces/whatsapp/security.py +3 -1
- agno/os/mcp.py +1 -0
- agno/os/router.py +31 -0
- agno/os/routers/traces/__init__.py +3 -0
- agno/os/routers/traces/schemas.py +414 -0
- agno/os/routers/traces/traces.py +499 -0
- agno/os/schema.py +12 -2
- agno/os/utils.py +57 -0
- agno/run/agent.py +1 -0
- agno/run/base.py +17 -0
- agno/run/team.py +4 -0
- agno/table.py +10 -0
- agno/team/team.py +221 -69
- agno/tools/function.py +10 -8
- agno/tools/google_drive.py +4 -3
- agno/tools/nano_banana.py +1 -1
- agno/tools/spotify.py +922 -0
- agno/tracing/__init__.py +12 -0
- agno/tracing/exporter.py +157 -0
- agno/tracing/schemas.py +276 -0
- agno/tracing/setup.py +111 -0
- agno/utils/agent.py +6 -6
- agno/utils/hooks.py +56 -1
- agno/utils/mcp.py +1 -1
- agno/vectordb/qdrant/qdrant.py +22 -22
- agno/workflow/condition.py +8 -0
- agno/workflow/loop.py +8 -0
- agno/workflow/parallel.py +8 -0
- agno/workflow/router.py +8 -0
- agno/workflow/step.py +20 -0
- agno/workflow/steps.py +8 -0
- agno/workflow/workflow.py +88 -19
- {agno-2.3.4.dist-info → agno-2.3.6.dist-info}/METADATA +38 -33
- {agno-2.3.4.dist-info → agno-2.3.6.dist-info}/RECORD +116 -105
- {agno-2.3.4.dist-info → agno-2.3.6.dist-info}/WHEEL +0 -0
- {agno-2.3.4.dist-info → agno-2.3.6.dist-info}/licenses/LICENSE +0 -0
- {agno-2.3.4.dist-info → agno-2.3.6.dist-info}/top_level.txt +0 -0
agno/os/router.py
CHANGED
@@ -4,6 +4,7 @@ from uuid import uuid4
 
 from fastapi import (
     APIRouter,
+    BackgroundTasks,
     Depends,
     File,
     Form,
@@ -300,9 +301,14 @@ async def agent_response_streamer(
     audio: Optional[List[Audio]] = None,
     videos: Optional[List[Video]] = None,
     files: Optional[List[FileMedia]] = None,
+    background_tasks: Optional[BackgroundTasks] = None,
     **kwargs: Any,
 ) -> AsyncGenerator:
     try:
+        # Pass background_tasks if provided
+        if background_tasks is not None:
+            kwargs["background_tasks"] = background_tasks
+
         run_response = agent.arun(
             input=message,
             session_id=session_id,
@@ -341,6 +347,7 @@ async def agent_continue_response_streamer(
     updated_tools: Optional[List] = None,
     session_id: Optional[str] = None,
     user_id: Optional[str] = None,
+    background_tasks: Optional[BackgroundTasks] = None,
 ) -> AsyncGenerator:
     try:
         continue_response = agent.acontinue_run(
@@ -350,6 +357,7 @@ async def agent_continue_response_streamer(
             user_id=user_id,
             stream=True,
             stream_events=True,
+            background_tasks=background_tasks,
         )
         async for run_response_chunk in continue_response:
             yield format_sse_event(run_response_chunk)  # type: ignore
@@ -384,10 +392,15 @@ async def team_response_streamer(
     audio: Optional[List[Audio]] = None,
     videos: Optional[List[Video]] = None,
     files: Optional[List[FileMedia]] = None,
+    background_tasks: Optional[BackgroundTasks] = None,
     **kwargs: Any,
 ) -> AsyncGenerator:
     """Run the given team asynchronously and yield its response"""
     try:
+        # Pass background_tasks if provided
+        if background_tasks is not None:
+            kwargs["background_tasks"] = background_tasks
+
         run_response = team.arun(
             input=message,
             session_id=session_id,
@@ -494,9 +507,14 @@ async def workflow_response_streamer(
     input: Optional[Union[str, Dict[str, Any], List[Any], BaseModel]] = None,
     session_id: Optional[str] = None,
     user_id: Optional[str] = None,
+    background_tasks: Optional[BackgroundTasks] = None,
     **kwargs: Any,
 ) -> AsyncGenerator:
     try:
+        # Pass background_tasks if provided
+        if background_tasks is not None:
+            kwargs["background_tasks"] = background_tasks
+
         run_response = workflow.arun(
             input=input,
             session_id=session_id,
@@ -721,6 +739,7 @@ def get_base_router(
             knowledge=os._get_knowledge_config(),
             evals=os._get_evals_config(),
             metrics=os._get_metrics_config(),
+            traces=os._get_traces_config(),
             agents=[AgentSummaryResponse.from_agent(agent) for agent in os.agents] if os.agents else [],
             teams=[TeamSummaryResponse.from_team(team) for team in os.teams] if os.teams else [],
             workflows=[WorkflowSummaryResponse.from_workflow(w) for w in os.workflows] if os.workflows else [],
@@ -813,6 +832,7 @@ def get_base_router(
     async def create_agent_run(
         agent_id: str,
         request: Request,
+        background_tasks: BackgroundTasks,
         message: str = Form(...),
         stream: bool = Form(False),
         session_id: Optional[str] = Form(None),
@@ -951,6 +971,7 @@ def get_base_router(
                 audio=base64_audios if base64_audios else None,
                 videos=base64_videos if base64_videos else None,
                 files=input_files if input_files else None,
+                background_tasks=background_tasks,
                 **kwargs,
             ),
             media_type="text/event-stream",
@@ -968,6 +989,7 @@ def get_base_router(
             videos=base64_videos if base64_videos else None,
             files=input_files if input_files else None,
             stream=False,
+            background_tasks=background_tasks,
             **kwargs,
         ),
     )
@@ -1036,6 +1058,7 @@ def get_base_router(
         agent_id: str,
         run_id: str,
         request: Request,
+        background_tasks: BackgroundTasks,
         tools: str = Form(...),  # JSON string of tools
         session_id: Optional[str] = Form(None),
         user_id: Optional[str] = Form(None),
@@ -1079,6 +1102,7 @@ def get_base_router(
                 updated_tools=updated_tools,
                 session_id=session_id,
                 user_id=user_id,
+                background_tasks=background_tasks,
             ),
             media_type="text/event-stream",
         )
@@ -1092,6 +1116,7 @@ def get_base_router(
             session_id=session_id,
             user_id=user_id,
             stream=False,
+            background_tasks=background_tasks,
         ),
     )
     return run_response_obj.to_dict()
@@ -1229,6 +1254,7 @@ def get_base_router(
     async def create_team_run(
         team_id: str,
         request: Request,
+        background_tasks: BackgroundTasks,
         message: str = Form(...),
         stream: bool = Form(True),
         monitor: bool = Form(True),
@@ -1338,6 +1364,7 @@ def get_base_router(
                 audio=base64_audios if base64_audios else None,
                 videos=base64_videos if base64_videos else None,
                 files=document_files if document_files else None,
+                background_tasks=background_tasks,
                 **kwargs,
             ),
             media_type="text/event-stream",
@@ -1353,6 +1380,7 @@ def get_base_router(
             videos=base64_videos if base64_videos else None,
             files=document_files if document_files else None,
             stream=False,
+            background_tasks=background_tasks,
             **kwargs,
         )
         return run_response.to_dict()
@@ -1683,6 +1711,7 @@ def get_base_router(
    async def create_workflow_run(
         workflow_id: str,
         request: Request,
+        background_tasks: BackgroundTasks,
         message: str = Form(...),
         stream: bool = Form(True),
         session_id: Optional[str] = Form(None),
@@ -1734,6 +1763,7 @@ def get_base_router(
                 input=message,
                 session_id=session_id,
                 user_id=user_id,
+                background_tasks=background_tasks,
                 **kwargs,
             ),
             media_type="text/event-stream",
@@ -1744,6 +1774,7 @@ def get_base_router(
             session_id=session_id,
             user_id=user_id,
             stream=False,
+            background_tasks=background_tasks,
             **kwargs,
         )
         return run_response.to_dict()
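The change above threads FastAPI's request-scoped BackgroundTasks object from each endpoint (create_agent_run, create_team_run, create_workflow_run, and the continue-run route) through the streamer helpers and into agent.arun / team.arun / workflow.arun via kwargs. FastAPI injects the object automatically whenever a route handler declares a BackgroundTasks parameter and runs anything queued on it after the response or stream completes. A minimal, self-contained sketch of the same forwarding idea follows; the /demo/runs path and the run_demo stand-in are illustrative only, not part of agno:

from typing import Any, Dict

from fastapi import BackgroundTasks, FastAPI, Form

app = FastAPI()


async def run_demo(message: str, **kwargs: Any) -> Dict[str, Any]:
    # Stand-in for agent.arun(...); only shows how the forwarded kwargs arrive.
    return {"echo": message, "background_tasks_forwarded": "background_tasks" in kwargs}


@app.post("/demo/runs")
async def create_demo_run(
    background_tasks: BackgroundTasks,  # injected by FastAPI for this request
    message: str = Form(...),
) -> Dict[str, Any]:
    kwargs: Dict[str, Any] = {}
    # Same guard as in the diff: forward only when the object is present.
    if background_tasks is not None:
        kwargs["background_tasks"] = background_tasks
    return await run_demo(message, **kwargs)

The Optional[BackgroundTasks] = None default plus the is-not-None guard in the streamer helpers keeps them callable outside a request context, while the route handlers always pass the real request-scoped instance.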
agno/os/routers/traces/schemas.py
ADDED
@@ -0,0 +1,414 @@
+from datetime import datetime
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel, Field
+
+from agno.os.utils import format_duration_ms
+
+
+def _derive_span_type(span: Any) -> str:
+    """
+    Derive the correct span type from span attributes.
+
+    OpenInference sets span_kind to:
+    - AGENT for both agents and teams
+    - CHAIN for workflows
+
+    We use additional context (agno.team.id, agno.workflow.id) to differentiate:
+    - WORKFLOW: CHAIN spans or spans with agno.workflow.id
+    - TEAM: AGENT spans with agno.team.id
+    - AGENT: AGENT spans without agno.team.id
+    - LLM, TOOL, etc.: unchanged
+    """
+    span_kind = span.attributes.get("openinference.span.kind", "UNKNOWN")
+
+    # Check for workflow (CHAIN kind or has workflow.id)
+    if span_kind == "CHAIN":
+        return "WORKFLOW"
+
+    # Check for team vs agent
+    if span_kind == "AGENT":
+        # If it has a team.id attribute, it's a TEAM span
+        if span.attributes.get("agno.team.id") or span.attributes.get("team.id"):
+            return "TEAM"
+        return "AGENT"
+
+    # Return original span kind for LLM, TOOL, etc.
+    return span_kind
+
+
+class TraceNode(BaseModel):
+    """Recursive node structure for rendering trace hierarchy in the frontend"""
+
+    id: str = Field(..., description="Span ID")
+    name: str = Field(..., description="Span name (e.g., 'agent.run', 'llm.invoke')")
+    type: str = Field(..., description="Span kind (AGENT, TEAM, WORKFLOW, LLM, TOOL)")
+    duration: str = Field(..., description="Human-readable duration (e.g., '123ms', '1.5s')")
+    start_time: datetime = Field(..., description="Start time (Pydantic auto-serializes to ISO 8601)")
+    end_time: datetime = Field(..., description="End time (Pydantic auto-serializes to ISO 8601)")
+    status: str = Field(..., description="Status code (OK, ERROR)")
+    input: Optional[str] = Field(None, description="Input to the span")
+    output: Optional[str] = Field(None, description="Output from the span")
+    error: Optional[str] = Field(None, description="Error message if status is ERROR")
+    spans: Optional[List["TraceNode"]] = Field(None, description="Child spans in the trace hierarchy")
+    step_type: Optional[str] = Field(None, description="Workflow step type (Step, Condition, function, Agent, Team)")
+    metadata: Optional[Dict[str, Any]] = Field(None, description="Additional span attributes and data")
+    extra_data: Optional[Dict[str, Any]] = Field(
+        None, description="Flexible field for custom attributes and additional data"
+    )
+
+    @classmethod
+    def from_span(cls, span: Any, spans: Optional[List["TraceNode"]] = None) -> "TraceNode":
+        """Create TraceNode from a Span object"""
+        # Derive the correct span type (AGENT, TEAM, WORKFLOW, LLM, TOOL, etc.)
+        span_type = _derive_span_type(span)
+
+        # Also get the raw span_kind for metadata extraction logic
+        span_kind = span.attributes.get("openinference.span.kind", "UNKNOWN")
+
+        # Extract input/output at root level (for all span types)
+        input_val = span.attributes.get("input.value")
+        output_val = span.attributes.get("output.value")
+
+        # Extract error information
+        error_val = None
+        if span.status_code == "ERROR":
+            error_val = span.status_message or span.attributes.get("exception.message")
+            output_val = None
+
+        # Build metadata with key attributes based on span kind
+        metadata: Dict[str, Any] = {}
+
+        if span_kind == "AGENT":
+            if run_id := span.attributes.get("agno.run.id"):
+                metadata["run_id"] = run_id
+
+        elif span_kind == "LLM":
+            if model_name := span.attributes.get("llm.model_name"):
+                metadata["model"] = model_name
+            if input_tokens := span.attributes.get("llm.token_count.prompt"):
+                metadata["input_tokens"] = input_tokens
+            if output_tokens := span.attributes.get("llm.token_count.completion"):
+                metadata["output_tokens"] = output_tokens
+
+        elif span_kind == "TOOL":
+            if tool_name := span.attributes.get("tool.name"):
+                metadata["tool_name"] = tool_name
+            if tool_params := span.attributes.get("tool.parameters"):
+                metadata["parameters"] = tool_params
+
+        elif span_kind == "CHAIN":
+            if workflow_description := span.attributes.get("agno.workflow.description"):
+                metadata["description"] = workflow_description
+            if steps_count := span.attributes.get("agno.workflow.steps_count"):
+                metadata["steps_count"] = steps_count
+            if steps := span.attributes.get("agno.workflow.steps"):
+                metadata["steps"] = steps
+            if step_types := span.attributes.get("agno.workflow.step_types"):
+                metadata["step_types"] = step_types
+
+        # Add session/user context if present
+        if session_id := span.attributes.get("session.id"):
+            metadata["session_id"] = session_id
+        if user_id := span.attributes.get("user.id"):
+            metadata["user_id"] = user_id
+
+        # Use datetime objects directly
+        return cls(
+            id=span.span_id,
+            name=span.name,
+            type=span_type,
+            duration=format_duration_ms(span.duration_ms),
+            start_time=span.start_time,
+            end_time=span.end_time,
+            status=span.status_code,
+            input=input_val,
+            output=output_val,
+            error=error_val,
+            spans=spans,
+            step_type=None,  # Set by _build_span_tree for workflow steps
+            metadata=metadata if metadata else None,
+            extra_data=None,
+        )
+
+
+class TraceSummary(BaseModel):
+    """Summary information for trace list view"""
+
+    trace_id: str = Field(..., description="Unique trace identifier")
+    name: str = Field(..., description="Trace name (usually root span name)")
+    status: str = Field(..., description="Overall status (OK, ERROR, UNSET)")
+    duration: str = Field(..., description="Human-readable total duration")
+    start_time: datetime = Field(..., description="Trace start time (Pydantic auto-serializes to ISO 8601)")
+    end_time: datetime = Field(..., description="Trace end time (Pydantic auto-serializes to ISO 8601)")
+    total_spans: int = Field(..., description="Total number of spans in this trace")
+    error_count: int = Field(..., description="Number of spans with errors")
+    input: Optional[str] = Field(None, description="Input to the agent")
+    run_id: Optional[str] = Field(None, description="Associated run ID")
+    session_id: Optional[str] = Field(None, description="Associated session ID")
+    user_id: Optional[str] = Field(None, description="Associated user ID")
+    agent_id: Optional[str] = Field(None, description="Associated agent ID")
+    team_id: Optional[str] = Field(None, description="Associated team ID")
+    workflow_id: Optional[str] = Field(None, description="Associated workflow ID")
+    created_at: datetime = Field(..., description="Time when trace was created (Pydantic auto-serializes to ISO 8601)")
+
+    @classmethod
+    def from_trace(cls, trace: Any, input: Optional[str] = None) -> "TraceSummary":
+        # Use datetime objects directly (Pydantic will auto-serialize to ISO 8601)
+        return cls(
+            trace_id=trace.trace_id,
+            name=trace.name,
+            status=trace.status,
+            duration=format_duration_ms(trace.duration_ms),
+            start_time=trace.start_time,
+            end_time=trace.end_time,
+            total_spans=trace.total_spans,
+            error_count=trace.error_count,
+            input=input,
+            run_id=trace.run_id,
+            session_id=trace.session_id,
+            user_id=trace.user_id,
+            agent_id=trace.agent_id,
+            team_id=trace.team_id,
+            workflow_id=trace.workflow_id,
+            created_at=trace.created_at,
+        )
+
+
+class TraceSessionStats(BaseModel):
+    """Aggregated trace statistics grouped by session"""
+
+    session_id: str = Field(..., description="Session identifier")
+    user_id: Optional[str] = Field(None, description="User ID associated with the session")
+    agent_id: Optional[str] = Field(None, description="Agent ID(s) used in the session")
+    team_id: Optional[str] = Field(None, description="Team ID associated with the session")
+    workflow_id: Optional[str] = Field(None, description="Workflow ID associated with the session")
+    total_traces: int = Field(..., description="Total number of traces in this session")
+    first_trace_at: datetime = Field(..., description="Time of first trace (Pydantic auto-serializes to ISO 8601)")
+    last_trace_at: datetime = Field(..., description="Time of last trace (Pydantic auto-serializes to ISO 8601)")
+
+
+class TraceDetail(BaseModel):
+    """Detailed trace information with hierarchical span tree"""
+
+    trace_id: str = Field(..., description="Unique trace identifier")
+    name: str = Field(..., description="Trace name (usually root span name)")
+    status: str = Field(..., description="Overall status (OK, ERROR)")
+    duration: str = Field(..., description="Human-readable total duration")
+    start_time: datetime = Field(..., description="Trace start time (Pydantic auto-serializes to ISO 8601)")
+    end_time: datetime = Field(..., description="Trace end time (Pydantic auto-serializes to ISO 8601)")
+    total_spans: int = Field(..., description="Total number of spans in this trace")
+    error_count: int = Field(..., description="Number of spans with errors")
+    input: Optional[str] = Field(None, description="Input to the agent/workflow")
+    output: Optional[str] = Field(None, description="Output from the agent/workflow")
+    error: Optional[str] = Field(None, description="Error message if status is ERROR")
+    run_id: Optional[str] = Field(None, description="Associated run ID")
+    session_id: Optional[str] = Field(None, description="Associated session ID")
+    user_id: Optional[str] = Field(None, description="Associated user ID")
+    agent_id: Optional[str] = Field(None, description="Associated agent ID")
+    team_id: Optional[str] = Field(None, description="Associated team ID")
+    workflow_id: Optional[str] = Field(None, description="Associated workflow ID")
+    created_at: datetime = Field(..., description="Time when trace was created (Pydantic auto-serializes to ISO 8601)")
+    tree: List[TraceNode] = Field(..., description="Hierarchical tree of spans (root nodes)")
+
+    @classmethod
+    def from_trace_and_spans(cls, trace: Any, spans: List[Any]) -> "TraceDetail":
+        """Create TraceDetail from a Trace and its Spans, building the tree structure"""
+        # Find root span to extract input/output/error
+        root_span = next((s for s in spans if not s.parent_span_id), None)
+        trace_input = None
+        trace_output = None
+        trace_error = None
+
+        if root_span:
+            trace_input = root_span.attributes.get("input.value")
+            output_val = root_span.attributes.get("output.value")
+
+            # If trace status is ERROR, extract error and set output to None
+            if trace.status == "ERROR" or root_span.status_code == "ERROR":
+                trace_error = root_span.status_message or root_span.attributes.get("exception.message")
+                trace_output = None
+            else:
+                trace_output = output_val
+
+            span_kind = root_span.attributes.get("openinference.span.kind", "")
+            output_is_empty = not trace_output or trace_output == "None" or str(trace_output).strip() == "None"
+            if span_kind == "CHAIN" and output_is_empty and trace.status != "ERROR":
+                # Find direct children of root span (workflow steps)
+                root_span_id = root_span.span_id
+                direct_children = [s for s in spans if s.parent_span_id == root_span_id]
+                if direct_children:
+                    # Sort by end_time to get the last executed step
+                    direct_children.sort(key=lambda s: s.end_time, reverse=True)
+                    last_step = direct_children[0]
+                    # Get output from the last step
+                    trace_output = last_step.attributes.get("output.value")
+
+        # Calculate total tokens from all LLM spans
+        total_input_tokens = 0
+        total_output_tokens = 0
+        for span in spans:
+            if span.attributes.get("openinference.span.kind") == "LLM":
+                input_tokens = span.attributes.get("llm.token_count.prompt", 0)
+                output_tokens = span.attributes.get("llm.token_count.completion", 0)
+                if input_tokens:
+                    total_input_tokens += input_tokens
+                if output_tokens:
+                    total_output_tokens += output_tokens
+
+        # Build span tree with token totals
+        span_tree = cls._build_span_tree(
+            spans,
+            total_input_tokens,
+            total_output_tokens,
+            trace_start_time=trace.start_time,
+            trace_end_time=trace.end_time,
+            trace_duration_ms=trace.duration_ms,
+        )
+
+        # Use datetime objects directly (Pydantic will auto-serialize to ISO 8601)
+        return cls(
+            trace_id=trace.trace_id,
+            name=trace.name,
+            status=trace.status,
+            duration=format_duration_ms(trace.duration_ms),
+            start_time=trace.start_time,
+            end_time=trace.end_time,
+            total_spans=trace.total_spans,
+            error_count=trace.error_count,
+            input=trace_input,
+            output=trace_output,
+            error=trace_error,
+            run_id=trace.run_id,
+            session_id=trace.session_id,
+            user_id=trace.user_id,
+            agent_id=trace.agent_id,
+            team_id=trace.team_id,
+            workflow_id=trace.workflow_id,
+            created_at=trace.created_at,
+            tree=span_tree,
+        )
+
+    @staticmethod
+    def _build_span_tree(
+        spans: List[Any],
+        total_input_tokens: int,
+        total_output_tokens: int,
+        trace_start_time: Optional[datetime] = None,
+        trace_end_time: Optional[datetime] = None,
+        trace_duration_ms: Optional[int] = None,
+    ) -> List[TraceNode]:
+        """Build hierarchical tree from flat list of spans
+
+        Args:
+            spans: List of span objects
+            total_input_tokens: Total input tokens across all spans
+            total_output_tokens: Total output tokens across all spans
+            trace_start_time: Corrected start time from trace aggregation
+            trace_end_time: Corrected end time from trace aggregation
+            trace_duration_ms: Corrected duration from trace aggregation
+        """
+        if not spans:
+            return []
+
+        # Create a map of parent_id -> list of spans
+        spans_map: Dict[Optional[str], List[Any]] = {}
+        for span in spans:
+            parent_id = span.parent_span_id
+            if parent_id not in spans_map:
+                spans_map[parent_id] = []
+            spans_map[parent_id].append(span)
+
+        # Extract step_types list from workflow root span for index-based matching
+        step_types_list: List[str] = []
+        root_spans = spans_map.get(None, [])
+        for root_span in root_spans:
+            span_kind = root_span.attributes.get("openinference.span.kind", "")
+            if span_kind == "CHAIN":
+                step_types = root_span.attributes.get("agno.workflow.step_types", [])
+                if step_types:
+                    step_types_list = list(step_types)
+                    break  # Use first workflow root span's step_types
+
+        # Recursive function to build tree for a span
+        # step_index is used to track position within direct children of root (workflow steps)
+        def build_node(span: Any, is_root: bool = False, step_index: Optional[int] = None) -> TraceNode:
+            span_id = span.span_id
+            children_spans = spans_map.get(span_id, [])
+
+            # Sort children spans by start time
+            if children_spans:
+                children_spans.sort(key=lambda s: s.start_time)
+
+            # Recursively build spans
+            # For root span's direct children (workflow steps), pass the index
+            children_nodes: Optional[List[TraceNode]] = None
+            if is_root and step_types_list:
+                children_nodes = []
+                for idx, child in enumerate(children_spans):
+                    children_nodes.append(build_node(child, step_index=idx))
+            elif children_spans:
+                children_nodes = [build_node(child) for child in children_spans]
+
+            # For root span, create custom metadata with token totals
+            if is_root:
+                # Build simplified metadata for root with token totals
+                root_metadata: Dict[str, Any] = {}
+                if total_input_tokens > 0:
+                    root_metadata["total_input_tokens"] = total_input_tokens
+                if total_output_tokens > 0:
+                    root_metadata["total_output_tokens"] = total_output_tokens
+
+                # Use trace-level timing if available
+                start_time = trace_start_time if trace_start_time else span.start_time
+                end_time = trace_end_time if trace_end_time else span.end_time
+                duration_ms = trace_duration_ms if trace_duration_ms is not None else span.duration_ms
+
+                # Derive the correct span type (AGENT, TEAM, WORKFLOW, etc.)
+                span_type = _derive_span_type(span)
+                span_kind = span.attributes.get("openinference.span.kind", "UNKNOWN")
+
+                # Add workflow-specific metadata for CHAIN/WORKFLOW spans
+                if span_kind == "CHAIN":
+                    if workflow_description := span.attributes.get("agno.workflow.description"):
+                        root_metadata["description"] = workflow_description
+                    if steps_count := span.attributes.get("agno.workflow.steps_count"):
+                        root_metadata["steps_count"] = steps_count
+                    if steps := span.attributes.get("agno.workflow.steps"):
+                        root_metadata["steps"] = steps
+                    if step_types := span.attributes.get("agno.workflow.step_types"):
+                        root_metadata["step_types"] = step_types
+
+                # Use datetime objects directly (Pydantic will auto-serialize to ISO 8601)
+                # Skip input/output/error for root span (already at top level of TraceDetail)
+
+                return TraceNode(
+                    id=span.span_id,
+                    name=span.name,
+                    type=span_type,
+                    duration=format_duration_ms(duration_ms),
+                    start_time=start_time,
+                    end_time=end_time,
+                    status=span.status_code,
+                    input=None,  # Skip for root span (already at TraceDetail level)
+                    output=None,  # Skip for root span (already at TraceDetail level)
+                    error=None,  # Skip for root span (already at TraceDetail level)
+                    spans=children_nodes if children_nodes else None,
+                    metadata=root_metadata if root_metadata else None,
+                    extra_data=None,
+                )
+            else:
+                # Create node from span
+                node = TraceNode.from_span(span, spans=children_nodes)
+
+                # For workflow step spans (direct children of root), assign step_type by index
+                if step_index is not None and step_types_list and step_index < len(step_types_list):
+                    node.step_type = step_types_list[step_index]
+
+                return node
+
+        # Sort root spans by start time
+        root_spans.sort(key=lambda s: s.start_time)
+
+        # Build tree starting from roots
+        return [build_node(root, is_root=True) for root in root_spans]
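To make the shapes above concrete, here is a minimal sketch that feeds TraceNode.from_span a stand-in span built with SimpleNamespace. The stand-in carries only the fields the code reads (span_id, name, attributes, status_code, status_message, start_time, end_time, duration_ms); real spans are produced by the agno.tracing module added in this release and may carry more. Assumes agno 2.3.6 is installed.

from datetime import datetime, timedelta, timezone
from types import SimpleNamespace

from agno.os.routers.traces.schemas import TraceNode, _derive_span_type

start = datetime.now(timezone.utc)
llm_span = SimpleNamespace(
    span_id="span-1",
    name="llm.invoke",
    attributes={
        "openinference.span.kind": "LLM",
        "input.value": "What is 2 + 2?",
        "output.value": "4",
        "llm.model_name": "gpt-4o",
        "llm.token_count.prompt": 12,
        "llm.token_count.completion": 3,
    },
    status_code="OK",
    status_message=None,
    start_time=start,
    end_time=start + timedelta(milliseconds=850),
    duration_ms=850,
)

# LLM kinds pass through unchanged; only AGENT/CHAIN kinds get remapped.
print(_derive_span_type(llm_span))

node = TraceNode.from_span(llm_span)
# metadata holds the model name and token counts for LLM spans.
print(node.type, node.duration, node.metadata)

For a full trace, TraceDetail.from_trace_and_spans is handed the trace record plus every span, groups children by parent_span_id, and rolls LLM token counts up into the root node's metadata, as the code above shows.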