agno 2.3.4-py3-none-any.whl → 2.3.5-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agno/agent/agent.py +177 -41
- agno/culture/manager.py +2 -2
- agno/db/base.py +330 -8
- agno/db/dynamo/dynamo.py +722 -2
- agno/db/dynamo/schemas.py +127 -0
- agno/db/firestore/firestore.py +573 -1
- agno/db/firestore/schemas.py +40 -0
- agno/db/gcs_json/gcs_json_db.py +446 -1
- agno/db/in_memory/in_memory_db.py +143 -1
- agno/db/json/json_db.py +438 -1
- agno/db/mongo/async_mongo.py +522 -0
- agno/db/mongo/mongo.py +523 -1
- agno/db/mongo/schemas.py +29 -0
- agno/db/mysql/mysql.py +536 -3
- agno/db/mysql/schemas.py +38 -0
- agno/db/postgres/async_postgres.py +541 -13
- agno/db/postgres/postgres.py +535 -2
- agno/db/postgres/schemas.py +38 -0
- agno/db/redis/redis.py +468 -1
- agno/db/redis/schemas.py +32 -0
- agno/db/singlestore/schemas.py +38 -0
- agno/db/singlestore/singlestore.py +523 -1
- agno/db/sqlite/async_sqlite.py +548 -9
- agno/db/sqlite/schemas.py +38 -0
- agno/db/sqlite/sqlite.py +537 -5
- agno/db/sqlite/utils.py +6 -8
- agno/db/surrealdb/models.py +25 -0
- agno/db/surrealdb/surrealdb.py +548 -1
- agno/eval/accuracy.py +10 -4
- agno/eval/performance.py +10 -4
- agno/eval/reliability.py +22 -13
- agno/exceptions.py +11 -0
- agno/hooks/__init__.py +3 -0
- agno/hooks/decorator.py +164 -0
- agno/knowledge/chunking/semantic.py +2 -2
- agno/models/aimlapi/aimlapi.py +2 -3
- agno/models/anthropic/claude.py +18 -13
- agno/models/aws/bedrock.py +3 -4
- agno/models/aws/claude.py +5 -1
- agno/models/azure/ai_foundry.py +2 -2
- agno/models/azure/openai_chat.py +8 -0
- agno/models/cerebras/cerebras.py +63 -11
- agno/models/cerebras/cerebras_openai.py +2 -3
- agno/models/cohere/chat.py +1 -5
- agno/models/cometapi/cometapi.py +2 -3
- agno/models/dashscope/dashscope.py +2 -3
- agno/models/deepinfra/deepinfra.py +2 -3
- agno/models/deepseek/deepseek.py +2 -3
- agno/models/fireworks/fireworks.py +2 -3
- agno/models/google/gemini.py +9 -7
- agno/models/groq/groq.py +2 -3
- agno/models/huggingface/huggingface.py +1 -5
- agno/models/ibm/watsonx.py +1 -5
- agno/models/internlm/internlm.py +2 -3
- agno/models/langdb/langdb.py +6 -4
- agno/models/litellm/chat.py +2 -2
- agno/models/litellm/litellm_openai.py +2 -3
- agno/models/meta/llama.py +1 -5
- agno/models/meta/llama_openai.py +4 -5
- agno/models/mistral/mistral.py +1 -5
- agno/models/nebius/nebius.py +2 -3
- agno/models/nvidia/nvidia.py +4 -5
- agno/models/openai/chat.py +14 -3
- agno/models/openai/responses.py +14 -3
- agno/models/openrouter/openrouter.py +4 -5
- agno/models/perplexity/perplexity.py +2 -3
- agno/models/portkey/portkey.py +7 -6
- agno/models/requesty/requesty.py +4 -5
- agno/models/response.py +2 -1
- agno/models/sambanova/sambanova.py +4 -5
- agno/models/siliconflow/siliconflow.py +3 -4
- agno/models/together/together.py +4 -5
- agno/models/vercel/v0.py +4 -5
- agno/models/vllm/vllm.py +19 -14
- agno/models/xai/xai.py +4 -5
- agno/os/app.py +104 -0
- agno/os/config.py +13 -0
- agno/os/interfaces/whatsapp/router.py +0 -1
- agno/os/mcp.py +1 -0
- agno/os/router.py +31 -0
- agno/os/routers/traces/__init__.py +3 -0
- agno/os/routers/traces/schemas.py +414 -0
- agno/os/routers/traces/traces.py +499 -0
- agno/os/schema.py +10 -1
- agno/os/utils.py +57 -0
- agno/run/agent.py +1 -0
- agno/run/base.py +17 -0
- agno/run/team.py +4 -0
- agno/session/team.py +1 -0
- agno/table.py +10 -0
- agno/team/team.py +214 -65
- agno/tools/function.py +10 -8
- agno/tools/nano_banana.py +1 -1
- agno/tracing/__init__.py +12 -0
- agno/tracing/exporter.py +157 -0
- agno/tracing/schemas.py +276 -0
- agno/tracing/setup.py +111 -0
- agno/utils/agent.py +4 -4
- agno/utils/hooks.py +56 -1
- agno/vectordb/qdrant/qdrant.py +22 -22
- agno/workflow/condition.py +8 -0
- agno/workflow/loop.py +8 -0
- agno/workflow/parallel.py +8 -0
- agno/workflow/router.py +8 -0
- agno/workflow/step.py +20 -0
- agno/workflow/steps.py +8 -0
- agno/workflow/workflow.py +83 -17
- {agno-2.3.4.dist-info → agno-2.3.5.dist-info}/METADATA +2 -2
- {agno-2.3.4.dist-info → agno-2.3.5.dist-info}/RECORD +112 -102
- {agno-2.3.4.dist-info → agno-2.3.5.dist-info}/WHEEL +0 -0
- {agno-2.3.4.dist-info → agno-2.3.5.dist-info}/licenses/LICENSE +0 -0
- {agno-2.3.4.dist-info → agno-2.3.5.dist-info}/top_level.txt +0 -0
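The bulk of this release is the new run-tracing feature: the agno/tracing and agno/os/routers/traces modules are added, and every database backend (including the SQLite backend diffed below) gains traces/spans tables plus create/get methods for them. A minimal usage sketch, assuming only the constructor keywords and method signatures visible in this diff; the db_file argument and the table names are illustrative, not taken from this diff:

import asyncio

from agno.db.sqlite.async_sqlite import AsyncSqliteDb


async def main() -> None:
    # db_file is assumed from the existing SqliteDb API; traces_table/spans_table are new in 2.3.5.
    db = AsyncSqliteDb(db_file="tmp/agno.db", traces_table="agno_traces", spans_table="agno_spans")

    # New paginated trace listing: returns (traces, total_count).
    traces, total = await db.get_traces(user_id="user-123", limit=20, page=1)

    # New span lookup; spans reference their trace via trace_id (see the schema changes).
    if traces:
        spans = await db.get_spans(trace_id=traces[0].trace_id)
        print(total, len(spans))


asyncio.run(main())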
agno/db/sqlite/async_sqlite.py
CHANGED
@@ -1,9 +1,12 @@
 import time
 from datetime import date, datetime, timedelta, timezone
 from pathlib import Path
-from typing import Any, Dict, List, Optional, Sequence, Tuple, Union, cast
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Tuple, Union, cast
 from uuid import uuid4
 
+if TYPE_CHECKING:
+    from agno.tracing.schemas import Span, Trace
+
 from agno.db.base import AsyncBaseDb, SessionType
 from agno.db.migrations.manager import MigrationManager
 from agno.db.schemas.culture import CulturalKnowledge
@@ -28,7 +31,7 @@ from agno.utils.log import log_debug, log_error, log_info, log_warning
 from agno.utils.string import generate_id
 
 try:
-    from sqlalchemy import Column, MetaData, String, Table, func, select, text
+    from sqlalchemy import Column, MetaData, String, Table, func, select, text, update
     from sqlalchemy.dialects import sqlite
     from sqlalchemy.ext.asyncio import AsyncEngine, async_sessionmaker, create_async_engine
     from sqlalchemy.schema import Index, UniqueConstraint
@@ -48,6 +51,8 @@ class AsyncSqliteDb(AsyncBaseDb):
         metrics_table: Optional[str] = None,
         eval_table: Optional[str] = None,
         knowledge_table: Optional[str] = None,
+        traces_table: Optional[str] = None,
+        spans_table: Optional[str] = None,
         versions_table: Optional[str] = None,
         id: Optional[str] = None,
     ):
@@ -70,6 +75,8 @@ class AsyncSqliteDb(AsyncBaseDb):
             metrics_table (Optional[str]): Name of the table to store metrics.
             eval_table (Optional[str]): Name of the table to store evaluation runs data.
             knowledge_table (Optional[str]): Name of the table to store knowledge documents data.
+            traces_table (Optional[str]): Name of the table to store run traces.
+            spans_table (Optional[str]): Name of the table to store span events.
             versions_table (Optional[str]): Name of the table to store schema versions.
             id (Optional[str]): ID of the database.
 
@@ -88,6 +95,8 @@ class AsyncSqliteDb(AsyncBaseDb):
             metrics_table=metrics_table,
             eval_table=eval_table,
             knowledge_table=knowledge_table,
+            traces_table=traces_table,
+            spans_table=spans_table,
             versions_table=versions_table,
         )
 
@@ -140,7 +149,9 @@ class AsyncSqliteDb(AsyncBaseDb):
         ]
 
         for table_name, table_type in tables_to_create:
-            await self._get_or_create_table(table_name=table_name, table_type=table_type)
+            await self._get_or_create_table(
+                table_name=table_name, table_type=table_type, create_table_if_not_found=True
+            )
 
     async def _create_table(self, table_name: str, table_type: str) -> Table:
         """
@@ -232,12 +243,13 @@ class AsyncSqliteDb(AsyncBaseDb):
             log_error(f"Could not create table '{table_name}': {e}")
             raise e
 
-    async def _get_table(self, table_type: str) -> Optional[Table]:
+    async def _get_table(self, table_type: str, create_table_if_not_found: Optional[bool] = False) -> Optional[Table]:
        if table_type == "sessions":
             if not hasattr(self, "session_table"):
                 self.session_table = await self._get_or_create_table(
                     table_name=self.session_table_name,
                     table_type=table_type,
+                    create_table_if_not_found=create_table_if_not_found,
                 )
             return self.session_table
 
@@ -246,6 +258,7 @@ class AsyncSqliteDb(AsyncBaseDb):
                 self.memory_table = await self._get_or_create_table(
                     table_name=self.memory_table_name,
                     table_type="memories",
+                    create_table_if_not_found=create_table_if_not_found,
                 )
             return self.memory_table
 
@@ -254,6 +267,7 @@ class AsyncSqliteDb(AsyncBaseDb):
                 self.metrics_table = await self._get_or_create_table(
                     table_name=self.metrics_table_name,
                     table_type="metrics",
+                    create_table_if_not_found=create_table_if_not_found,
                 )
             return self.metrics_table
 
@@ -262,6 +276,7 @@ class AsyncSqliteDb(AsyncBaseDb):
                 self.eval_table = await self._get_or_create_table(
                     table_name=self.eval_table_name,
                     table_type="evals",
+                    create_table_if_not_found=create_table_if_not_found,
                 )
             return self.eval_table
 
@@ -270,6 +285,7 @@ class AsyncSqliteDb(AsyncBaseDb):
                 self.knowledge_table = await self._get_or_create_table(
                     table_name=self.knowledge_table_name,
                     table_type="knowledge",
+                    create_table_if_not_found=create_table_if_not_found,
                 )
             return self.knowledge_table
 
@@ -278,6 +294,7 @@ class AsyncSqliteDb(AsyncBaseDb):
                 self.culture_table = await self._get_or_create_table(
                     table_name=self.culture_table_name,
                     table_type="culture",
+                    create_table_if_not_found=create_table_if_not_found,
                 )
             return self.culture_table
 
@@ -286,9 +303,30 @@ class AsyncSqliteDb(AsyncBaseDb):
                 self.versions_table = await self._get_or_create_table(
                     table_name=self.versions_table_name,
                     table_type="versions",
+                    create_table_if_not_found=create_table_if_not_found,
                 )
             return self.versions_table
 
+        elif table_type == "traces":
+            if not hasattr(self, "traces_table"):
+                self.traces_table = await self._get_or_create_table(
+                    table_name=self.trace_table_name,
+                    table_type="traces",
+                    create_table_if_not_found=create_table_if_not_found,
+                )
+            return self.traces_table
+
+        elif table_type == "spans":
+            if not hasattr(self, "spans_table"):
+                # Ensure traces table exists first (spans has FK to traces)
+                await self._get_table(table_type="traces", create_table_if_not_found=True)
+                self.spans_table = await self._get_or_create_table(
+                    table_name=self.span_table_name,
+                    table_type="spans",
+                    create_table_if_not_found=create_table_if_not_found,
+                )
+            return self.spans_table
+
         else:
             raise ValueError(f"Unknown table type: '{table_type}'")
 
@@ -296,6 +334,7 @@ class AsyncSqliteDb(AsyncBaseDb):
         self,
         table_name: str,
         table_type: str,
+        create_table_if_not_found: Optional[bool] = False,
     ) -> Table:
         """
         Check if the table exists and is valid, else create it.
@@ -310,7 +349,7 @@ class AsyncSqliteDb(AsyncBaseDb):
         async with self.async_session_factory() as sess, sess.begin():
             table_is_available = await ais_table_available(session=sess, table_name=table_name)
 
-            if not table_is_available:
+            if (not table_is_available) and create_table_if_not_found:
                 return await self._create_table(table_name=table_name, table_type=table_type)
 
             # SQLite version of table validation (no schema)
@@ -333,7 +372,7 @@ class AsyncSqliteDb(AsyncBaseDb):
 
     async def get_latest_schema_version(self, table_name: str) -> str:
         """Get the latest version of the database schema."""
-        table = await self._get_table(table_type="versions")
+        table = await self._get_table(table_type="versions", create_table_if_not_found=True)
         if table is None:
             return "2.0.0"
         async with self.async_session_factory() as sess:
@@ -350,7 +389,7 @@ class AsyncSqliteDb(AsyncBaseDb):
 
     async def upsert_schema_version(self, table_name: str, version: str) -> None:
         """Upsert the schema version into the database."""
-        table = await self._get_table(table_type="versions")
+        table = await self._get_table(table_type="versions", create_table_if_not_found=True)
         if table is None:
             return
         current_datetime = datetime.now().isoformat()
@@ -648,7 +687,7 @@ class AsyncSqliteDb(AsyncBaseDb):
             Exception: If an error occurs during upserting.
         """
         try:
-            table = await self._get_table(table_type="sessions")
+            table = await self._get_table(table_type="sessions", create_table_if_not_found=True)
             if table is None:
                 return None
 
@@ -794,7 +833,7 @@ class AsyncSqliteDb(AsyncBaseDb):
             return []
 
         try:
-            table = await self._get_table(table_type="sessions")
+            table = await self._get_table(table_type="sessions", create_table_if_not_found=True)
             if table is None:
                 log_info("Sessions table not available, falling back to individual upserts")
                 return [
@@ -2371,3 +2410,503 @@ class AsyncSqliteDb(AsyncBaseDb):
         except Exception as e:
             log_error(f"Error upserting cultural knowledge: {e}")
             raise e
+
+    # --- Traces ---
+    def _get_traces_base_query(self, table: Table, spans_table: Optional[Table] = None):
+        """Build base query for traces with aggregated span counts.
+
+        Args:
+            table: The traces table.
+            spans_table: The spans table (optional).
+
+        Returns:
+            SQLAlchemy select statement with total_spans and error_count calculated dynamically.
+        """
+        from sqlalchemy import case, literal
+
+        if spans_table is not None:
+            # JOIN with spans table to calculate total_spans and error_count
+            return (
+                select(
+                    table,
+                    func.coalesce(func.count(spans_table.c.span_id), 0).label("total_spans"),
+                    func.coalesce(func.sum(case((spans_table.c.status_code == "ERROR", 1), else_=0)), 0).label(
+                        "error_count"
+                    ),
+                )
+                .select_from(table.outerjoin(spans_table, table.c.trace_id == spans_table.c.trace_id))
+                .group_by(table.c.trace_id)
+            )
+        else:
+            # Fallback if spans table doesn't exist
+            return select(table, literal(0).label("total_spans"), literal(0).label("error_count"))
+
+    async def create_trace(self, trace: "Trace") -> None:
+        """Create a single trace record in the database.
+
+        Args:
+            trace: The Trace object to store (one per trace_id).
+        """
+        try:
+            table = await self._get_table(table_type="traces", create_table_if_not_found=True)
+            if table is None:
+                return
+
+            async with self.async_session_factory() as sess, sess.begin():
+                # Check if trace exists
+                result = await sess.execute(select(table).where(table.c.trace_id == trace.trace_id))
+                existing = result.fetchone()
+
+                if existing:
+                    # workflow (level 3) > team (level 2) > agent (level 1) > child/unknown (level 0)
+
+                    def get_component_level(workflow_id, team_id, agent_id, name):
+                        # Check if name indicates a root span
+                        is_root_name = ".run" in name or ".arun" in name
+
+                        if not is_root_name:
+                            return 0  # Child span (not a root)
+                        elif workflow_id:
+                            return 3  # Workflow root
+                        elif team_id:
+                            return 2  # Team root
+                        elif agent_id:
+                            return 1  # Agent root
+                        else:
+                            return 0  # Unknown
+
+                    existing_level = get_component_level(
+                        existing.workflow_id, existing.team_id, existing.agent_id, existing.name
+                    )
+                    new_level = get_component_level(trace.workflow_id, trace.team_id, trace.agent_id, trace.name)
+
+                    # Only update name if new trace is from a higher or equal level
+                    should_update_name = new_level > existing_level
+
+                    # Parse existing start_time to calculate correct duration
+                    existing_start_time_str = existing.start_time
+                    if isinstance(existing_start_time_str, str):
+                        existing_start_time = datetime.fromisoformat(existing_start_time_str.replace("Z", "+00:00"))
+                    else:
+                        existing_start_time = trace.start_time
+
+                    recalculated_duration_ms = int((trace.end_time - existing_start_time).total_seconds() * 1000)
+
+                    update_values = {
+                        "end_time": trace.end_time.isoformat(),
+                        "duration_ms": recalculated_duration_ms,
+                        "status": trace.status,
+                        "name": trace.name if should_update_name else existing.name,
+                    }
+
+                    # Update context fields ONLY if new value is not None (preserve non-null values)
+                    if trace.run_id is not None:
+                        update_values["run_id"] = trace.run_id
+                    if trace.session_id is not None:
+                        update_values["session_id"] = trace.session_id
+                    if trace.user_id is not None:
+                        update_values["user_id"] = trace.user_id
+                    if trace.agent_id is not None:
+                        update_values["agent_id"] = trace.agent_id
+                    if trace.team_id is not None:
+                        update_values["team_id"] = trace.team_id
+                    if trace.workflow_id is not None:
+                        update_values["workflow_id"] = trace.workflow_id
+
+                    log_debug(
+                        f" Updating trace with context: run_id={update_values.get('run_id', 'unchanged')}, "
+                        f"session_id={update_values.get('session_id', 'unchanged')}, "
+                        f"user_id={update_values.get('user_id', 'unchanged')}, "
+                        f"agent_id={update_values.get('agent_id', 'unchanged')}, "
+                        f"team_id={update_values.get('team_id', 'unchanged')}, "
+                    )
+
+                    stmt = update(table).where(table.c.trace_id == trace.trace_id).values(**update_values)
+                    await sess.execute(stmt)
+                else:
+                    trace_dict = trace.to_dict()
+                    trace_dict.pop("total_spans", None)
+                    trace_dict.pop("error_count", None)
+                    stmt = sqlite.insert(table).values(trace_dict)
+                    await sess.execute(stmt)
+
+        except Exception as e:
+            log_error(f"Error creating trace: {e}")
+            # Don't raise - tracing should not break the main application flow
+
+    async def get_trace(
+        self,
+        trace_id: Optional[str] = None,
+        run_id: Optional[str] = None,
+    ):
+        """Get a single trace by trace_id or other filters.
+
+        Args:
+            trace_id: The unique trace identifier.
+            run_id: Filter by run ID (returns first match).
+
+        Returns:
+            Optional[Trace]: The trace if found, None otherwise.
+
+        Note:
+            If multiple filters are provided, trace_id takes precedence.
+            For other filters, the most recent trace is returned.
+        """
+        try:
+            from agno.tracing.schemas import Trace
+
+            table = await self._get_table(table_type="traces")
+            if table is None:
+                return None
+
+            # Get spans table for JOIN
+            spans_table = await self._get_table(table_type="spans")
+
+            async with self.async_session_factory() as sess:
+                # Build query with aggregated span counts
+                stmt = self._get_traces_base_query(table, spans_table)
+
+                if trace_id:
+                    stmt = stmt.where(table.c.trace_id == trace_id)
+                elif run_id:
+                    stmt = stmt.where(table.c.run_id == run_id)
+                else:
+                    log_debug("get_trace called without any filter parameters")
+                    return None
+
+                # Order by most recent and get first result
+                stmt = stmt.order_by(table.c.start_time.desc()).limit(1)
+                result = await sess.execute(stmt)
+                row = result.fetchone()
+
+                if row:
+                    return Trace.from_dict(dict(row._mapping))
+                return None
+
+        except Exception as e:
+            log_error(f"Error getting trace: {e}")
+            return None
+
+    async def get_traces(
+        self,
+        run_id: Optional[str] = None,
+        session_id: Optional[str] = None,
+        user_id: Optional[str] = None,
+        agent_id: Optional[str] = None,
+        team_id: Optional[str] = None,
+        workflow_id: Optional[str] = None,
+        status: Optional[str] = None,
+        start_time: Optional[datetime] = None,
+        end_time: Optional[datetime] = None,
+        limit: Optional[int] = 20,
+        page: Optional[int] = 1,
+    ) -> tuple[List, int]:
+        """Get traces matching the provided filters with pagination.
+
+        Args:
+            run_id: Filter by run ID.
+            session_id: Filter by session ID.
+            user_id: Filter by user ID.
+            agent_id: Filter by agent ID.
+            team_id: Filter by team ID.
+            workflow_id: Filter by workflow ID.
+            status: Filter by status (OK, ERROR, UNSET).
+            start_time: Filter traces starting after this datetime.
+            end_time: Filter traces ending before this datetime.
+            limit: Maximum number of traces to return per page.
+            page: Page number (1-indexed).
+
+        Returns:
+            tuple[List[Trace], int]: Tuple of (list of matching traces, total count).
+        """
+        try:
+            from agno.tracing.schemas import Trace
+
+            log_debug(
+                f"get_traces called with filters: run_id={run_id}, session_id={session_id}, user_id={user_id}, agent_id={agent_id}, page={page}, limit={limit}"
+            )
+
+            table = await self._get_table(table_type="traces")
+            if table is None:
+                log_debug("Traces table not found")
+                return [], 0
+
+            # Get spans table for JOIN
+            spans_table = await self._get_table(table_type="spans")
+
+            async with self.async_session_factory() as sess:
+                # Build base query with aggregated span counts
+                base_stmt = self._get_traces_base_query(table, spans_table)
+
+                # Apply filters
+                if run_id:
+                    base_stmt = base_stmt.where(table.c.run_id == run_id)
+                if session_id:
+                    log_debug(f"Filtering by session_id={session_id}")
+                    base_stmt = base_stmt.where(table.c.session_id == session_id)
+                if user_id:
+                    base_stmt = base_stmt.where(table.c.user_id == user_id)
+                if agent_id:
+                    base_stmt = base_stmt.where(table.c.agent_id == agent_id)
+                if team_id:
+                    base_stmt = base_stmt.where(table.c.team_id == team_id)
+                if workflow_id:
+                    base_stmt = base_stmt.where(table.c.workflow_id == workflow_id)
+                if status:
+                    base_stmt = base_stmt.where(table.c.status == status)
+                if start_time:
+                    # Convert datetime to ISO string for comparison
+                    base_stmt = base_stmt.where(table.c.start_time >= start_time.isoformat())
+                if end_time:
+                    # Convert datetime to ISO string for comparison
+                    base_stmt = base_stmt.where(table.c.end_time <= end_time.isoformat())
+
+                # Get total count
+                count_stmt = select(func.count()).select_from(base_stmt.alias())
+                total_count = await sess.scalar(count_stmt) or 0
+                log_debug(f"Total matching traces: {total_count}")
+
+                # Apply pagination
+                offset = (page - 1) * limit if page and limit else 0
+                paginated_stmt = base_stmt.order_by(table.c.start_time.desc()).limit(limit).offset(offset)
+
+                result = await sess.execute(paginated_stmt)
+                results = result.fetchall()
+                log_debug(f"Returning page {page} with {len(results)} traces")
+
+                traces = [Trace.from_dict(dict(row._mapping)) for row in results]
+                return traces, total_count
+
+        except Exception as e:
+            log_error(f"Error getting traces: {e}")
+            return [], 0
+
+    async def get_trace_stats(
+        self,
+        user_id: Optional[str] = None,
+        agent_id: Optional[str] = None,
+        team_id: Optional[str] = None,
+        workflow_id: Optional[str] = None,
+        start_time: Optional[datetime] = None,
+        end_time: Optional[datetime] = None,
+        limit: Optional[int] = 20,
+        page: Optional[int] = 1,
+    ) -> tuple[List[Dict[str, Any]], int]:
+        """Get trace statistics grouped by session.
+
+        Args:
+            user_id: Filter by user ID.
+            agent_id: Filter by agent ID.
+            team_id: Filter by team ID.
+            workflow_id: Filter by workflow ID.
+            start_time: Filter sessions with traces created after this datetime.
+            end_time: Filter sessions with traces created before this datetime.
+            limit: Maximum number of sessions to return per page.
+            page: Page number (1-indexed).
+
+        Returns:
+            tuple[List[Dict], int]: Tuple of (list of session stats dicts, total count).
+                Each dict contains: session_id, user_id, agent_id, team_id, total_traces,
+                workflow_id, first_trace_at, last_trace_at.
+        """
+        try:
+            log_debug(
+                f"get_trace_stats called with filters: user_id={user_id}, agent_id={agent_id}, "
+                f"workflow_id={workflow_id}, team_id={team_id}, "
+                f"start_time={start_time}, end_time={end_time}, page={page}, limit={limit}"
+            )
+
+            table = await self._get_table(table_type="traces")
+            if table is None:
+                log_debug("Traces table not found")
+                return [], 0
+
+            async with self.async_session_factory() as sess:
+                # Build base query grouped by session_id
+                base_stmt = (
+                    select(
+                        table.c.session_id,
+                        table.c.user_id,
+                        table.c.agent_id,
+                        table.c.team_id,
+                        table.c.workflow_id,
+                        func.count(table.c.trace_id).label("total_traces"),
+                        func.min(table.c.created_at).label("first_trace_at"),
+                        func.max(table.c.created_at).label("last_trace_at"),
+                    )
+                    .where(table.c.session_id.isnot(None))  # Only sessions with session_id
+                    .group_by(
+                        table.c.session_id, table.c.user_id, table.c.agent_id, table.c.team_id, table.c.workflow_id
+                    )
+                )
+
+                # Apply filters
+                if user_id:
+                    base_stmt = base_stmt.where(table.c.user_id == user_id)
+                if workflow_id:
+                    base_stmt = base_stmt.where(table.c.workflow_id == workflow_id)
+                if team_id:
+                    base_stmt = base_stmt.where(table.c.team_id == team_id)
+                if agent_id:
+                    base_stmt = base_stmt.where(table.c.agent_id == agent_id)
+                if start_time:
+                    # Convert datetime to ISO string for comparison
+                    base_stmt = base_stmt.where(table.c.created_at >= start_time.isoformat())
+                if end_time:
+                    # Convert datetime to ISO string for comparison
+                    base_stmt = base_stmt.where(table.c.created_at <= end_time.isoformat())
+
+                # Get total count of sessions
+                count_stmt = select(func.count()).select_from(base_stmt.alias())
+                total_count = await sess.scalar(count_stmt) or 0
+                log_debug(f"Total matching sessions: {total_count}")
+
+                # Apply pagination and ordering
+                offset = (page - 1) * limit if page and limit else 0
+                paginated_stmt = base_stmt.order_by(func.max(table.c.created_at).desc()).limit(limit).offset(offset)
+
+                result = await sess.execute(paginated_stmt)
+                results = result.fetchall()
+                log_debug(f"Returning page {page} with {len(results)} session stats")
+
+                # Convert to list of dicts with datetime objects
+                stats_list = []
+                for row in results:
+                    # Convert ISO strings to datetime objects
+                    first_trace_at_str = row.first_trace_at
+                    last_trace_at_str = row.last_trace_at
+
+                    # Parse ISO format strings to datetime objects
+                    first_trace_at = datetime.fromisoformat(first_trace_at_str.replace("Z", "+00:00"))
+                    last_trace_at = datetime.fromisoformat(last_trace_at_str.replace("Z", "+00:00"))
+
+                    stats_list.append(
+                        {
+                            "session_id": row.session_id,
+                            "user_id": row.user_id,
+                            "agent_id": row.agent_id,
+                            "team_id": row.team_id,
+                            "workflow_id": row.workflow_id,
+                            "total_traces": row.total_traces,
+                            "first_trace_at": first_trace_at,
+                            "last_trace_at": last_trace_at,
+                        }
+                    )
+
+                return stats_list, total_count
+
+        except Exception as e:
+            log_error(f"Error getting trace stats: {e}")
+            return [], 0
+
+    # --- Spans ---
+    async def create_span(self, span: "Span") -> None:
+        """Create a single span in the database.
+
+        Args:
+            span: The Span object to store.
+        """
+        try:
+            table = await self._get_table(table_type="spans", create_table_if_not_found=True)
+            if table is None:
+                return
+
+            async with self.async_session_factory() as sess, sess.begin():
+                stmt = sqlite.insert(table).values(span.to_dict())
+                await sess.execute(stmt)
+
+        except Exception as e:
+            log_error(f"Error creating span: {e}")
+
+    async def create_spans(self, spans: List) -> None:
+        """Create multiple spans in the database as a batch.
+
+        Args:
+            spans: List of Span objects to store.
+        """
+        if not spans:
+            return
+
+        try:
+            table = await self._get_table(table_type="spans", create_table_if_not_found=True)
+            if table is None:
+                return
+
+            async with self.async_session_factory() as sess, sess.begin():
+                for span in spans:
+                    stmt = sqlite.insert(table).values(span.to_dict())
+                    await sess.execute(stmt)
+
+        except Exception as e:
+            log_error(f"Error creating spans batch: {e}")
+
+    async def get_span(self, span_id: str):
+        """Get a single span by its span_id.
+
+        Args:
+            span_id: The unique span identifier.
+
+        Returns:
+            Optional[Span]: The span if found, None otherwise.
+        """
+        try:
+            from agno.tracing.schemas import Span
+
+            table = await self._get_table(table_type="spans")
+            if table is None:
+                return None
+
+            async with self.async_session_factory() as sess:
+                stmt = select(table).where(table.c.span_id == span_id)
+                result = await sess.execute(stmt)
+                row = result.fetchone()
+                if row:
+                    return Span.from_dict(dict(row._mapping))
+                return None
+
+        except Exception as e:
+            log_error(f"Error getting span: {e}")
+            return None
+
+    async def get_spans(
+        self,
+        trace_id: Optional[str] = None,
+        parent_span_id: Optional[str] = None,
+        limit: Optional[int] = 1000,
+    ) -> List:
+        """Get spans matching the provided filters.
+
+        Args:
+            trace_id: Filter by trace ID.
+            parent_span_id: Filter by parent span ID.
+            limit: Maximum number of spans to return.
+
+        Returns:
+            List[Span]: List of matching spans.
+        """
+        try:
+            from agno.tracing.schemas import Span
+
+            table = await self._get_table(table_type="spans")
+            if table is None:
+                return []
+
+            async with self.async_session_factory() as sess:
+                stmt = select(table)
+
+                # Apply filters
+                if trace_id:
+                    stmt = stmt.where(table.c.trace_id == trace_id)
+                if parent_span_id:
+                    stmt = stmt.where(table.c.parent_span_id == parent_span_id)
+
+                if limit:
+                    stmt = stmt.limit(limit)
+
+                result = await sess.execute(stmt)
+                results = result.fetchall()
+                return [Span.from_dict(dict(row._mapping)) for row in results]
+
+        except Exception as e:
+            log_error(f"Error getting spans: {e}")
+            return []