agno 2.3.12__py3-none-any.whl → 2.3.14__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- agno/agent/agent.py +1125 -1401
- agno/eval/__init__.py +21 -8
- agno/knowledge/embedder/azure_openai.py +0 -1
- agno/knowledge/embedder/google.py +1 -1
- agno/models/anthropic/claude.py +4 -1
- agno/models/azure/openai_chat.py +11 -5
- agno/models/base.py +8 -4
- agno/models/openai/chat.py +0 -2
- agno/models/openai/responses.py +2 -2
- agno/os/app.py +112 -5
- agno/os/auth.py +190 -3
- agno/os/config.py +9 -0
- agno/os/interfaces/a2a/router.py +619 -9
- agno/os/interfaces/a2a/utils.py +31 -32
- agno/os/middleware/__init__.py +2 -0
- agno/os/middleware/jwt.py +670 -108
- agno/os/router.py +0 -1
- agno/os/routers/agents/router.py +22 -4
- agno/os/routers/agents/schema.py +14 -1
- agno/os/routers/teams/router.py +20 -4
- agno/os/routers/teams/schema.py +14 -1
- agno/os/routers/workflows/router.py +88 -9
- agno/os/scopes.py +469 -0
- agno/os/utils.py +86 -53
- agno/reasoning/anthropic.py +85 -1
- agno/reasoning/azure_ai_foundry.py +93 -1
- agno/reasoning/deepseek.py +91 -1
- agno/reasoning/gemini.py +81 -1
- agno/reasoning/groq.py +103 -1
- agno/reasoning/manager.py +1244 -0
- agno/reasoning/ollama.py +93 -1
- agno/reasoning/openai.py +113 -1
- agno/reasoning/vertexai.py +85 -1
- agno/run/agent.py +11 -0
- agno/run/base.py +1 -1
- agno/run/team.py +11 -0
- agno/session/team.py +0 -3
- agno/team/team.py +1204 -1452
- agno/tools/postgres.py +1 -1
- agno/utils/cryptography.py +22 -0
- agno/utils/events.py +69 -2
- agno/utils/hooks.py +4 -10
- agno/utils/print_response/agent.py +52 -2
- agno/utils/print_response/team.py +141 -10
- agno/utils/prompts.py +8 -6
- agno/utils/string.py +46 -0
- agno/utils/team.py +1 -1
- agno/vectordb/chroma/chromadb.py +1 -0
- agno/vectordb/milvus/milvus.py +32 -3
- agno/vectordb/redis/redisdb.py +16 -2
- {agno-2.3.12.dist-info → agno-2.3.14.dist-info}/METADATA +3 -2
- {agno-2.3.12.dist-info → agno-2.3.14.dist-info}/RECORD +55 -52
- {agno-2.3.12.dist-info → agno-2.3.14.dist-info}/WHEEL +0 -0
- {agno-2.3.12.dist-info → agno-2.3.14.dist-info}/licenses/LICENSE +0 -0
- {agno-2.3.12.dist-info → agno-2.3.14.dist-info}/top_level.txt +0 -0
agno/os/interfaces/a2a/utils.py
CHANGED
````diff
@@ -95,7 +95,6 @@ async def map_a2a_request_to_run_input(request_body: dict, stream: bool = True)
     ```json
     {
         "jsonrpc": "2.0",
-        "method": "message/send",
         "id": "id",
         "params": {
             "message": {
@@ -325,7 +324,7 @@ async def stream_a2a_response(
         final=False,
     )
     response = SendStreamingMessageSuccessResponse(id=request_id, result=status_event)
-    yield json.dumps(response.model_dump(exclude_none=True))
+    yield f"event: TaskStatusUpdateEvent\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"

     # 2. Send all content and secondary events

@@ -341,7 +340,7 @@ async def stream_a2a_response(
                 metadata={"agno_content_category": "content"},
             )
             response = SendStreamingMessageSuccessResponse(id=request_id, result=message)
-            yield json.dumps(response.model_dump(exclude_none=True))
+            yield f"event: Message\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"

         # Send tool call events
         elif isinstance(event, (ToolCallStartedEvent, TeamToolCallStartedEvent)):
@@ -361,7 +360,7 @@ async def stream_a2a_response(
                 metadata=metadata,
             )
             response = SendStreamingMessageSuccessResponse(id=request_id, result=status_event)
-            yield json.dumps(response.model_dump(exclude_none=True))
+            yield f"event: TaskStatusUpdateEvent\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"

         elif isinstance(event, (ToolCallCompletedEvent, TeamToolCallCompletedEvent)):
             metadata = {"agno_event_type": "tool_call_completed"}
@@ -380,7 +379,7 @@ async def stream_a2a_response(
                 metadata=metadata,
             )
             response = SendStreamingMessageSuccessResponse(id=request_id, result=status_event)
-            yield json.dumps(response.model_dump(exclude_none=True))
+            yield f"event: TaskStatusUpdateEvent\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"

         # Send reasoning events
         elif isinstance(event, (ReasoningStartedEvent, TeamReasoningStartedEvent)):
@@ -392,7 +391,7 @@ async def stream_a2a_response(
                 metadata={"agno_event_type": "reasoning_started"},
             )
             response = SendStreamingMessageSuccessResponse(id=request_id, result=status_event)
-            yield json.dumps(response.model_dump(exclude_none=True))
+            yield f"event: TaskStatusUpdateEvent\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"

         elif isinstance(event, (ReasoningStepEvent, TeamReasoningStepEvent)):
             if event.reasoning_content:
@@ -415,7 +414,7 @@ async def stream_a2a_response(
                     metadata={"agno_content_category": "reasoning", "agno_event_type": "reasoning_step"},
                 )
                 response = SendStreamingMessageSuccessResponse(id=request_id, result=reasoning_message)
-                yield json.dumps(response.model_dump(exclude_none=True))
+                yield f"event: Message\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"

         elif isinstance(event, (ReasoningCompletedEvent, TeamReasoningCompletedEvent)):
             status_event = TaskStatusUpdateEvent(
@@ -426,7 +425,7 @@ async def stream_a2a_response(
                 metadata={"agno_event_type": "reasoning_completed"},
             )
             response = SendStreamingMessageSuccessResponse(id=request_id, result=status_event)
-            yield json.dumps(response.model_dump(exclude_none=True))
+            yield f"event: TaskStatusUpdateEvent\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"

         # Send memory update events
         elif isinstance(event, (MemoryUpdateStartedEvent, TeamMemoryUpdateStartedEvent)):
@@ -438,7 +437,7 @@ async def stream_a2a_response(
                 metadata={"agno_event_type": "memory_update_started"},
             )
             response = SendStreamingMessageSuccessResponse(id=request_id, result=status_event)
-            yield json.dumps(response.model_dump(exclude_none=True))
+            yield f"event: TaskStatusUpdateEvent\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"

         elif isinstance(event, (MemoryUpdateCompletedEvent, TeamMemoryUpdateCompletedEvent)):
             status_event = TaskStatusUpdateEvent(
@@ -449,7 +448,7 @@ async def stream_a2a_response(
                 metadata={"agno_event_type": "memory_update_completed"},
             )
             response = SendStreamingMessageSuccessResponse(id=request_id, result=status_event)
-            yield json.dumps(response.model_dump(exclude_none=True))
+            yield f"event: TaskStatusUpdateEvent\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"

         # Send workflow events
         elif isinstance(event, WorkflowStepStartedEvent):
@@ -465,7 +464,7 @@ async def stream_a2a_response(
                 metadata=metadata,
             )
             response = SendStreamingMessageSuccessResponse(id=request_id, result=status_event)
-            yield json.dumps(response.model_dump(exclude_none=True))
+            yield f"event: TaskStatusUpdateEvent\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"

         elif isinstance(event, WorkflowStepCompletedEvent):
             metadata = {"agno_event_type": "workflow_step_completed"}
@@ -480,7 +479,7 @@ async def stream_a2a_response(
                 metadata=metadata,
             )
             response = SendStreamingMessageSuccessResponse(id=request_id, result=status_event)
-            yield json.dumps(response.model_dump(exclude_none=True))
+            yield f"event: TaskStatusUpdateEvent\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"

         elif isinstance(event, WorkflowStepErrorEvent):
             metadata = {"agno_event_type": "workflow_step_error"}
@@ -497,7 +496,7 @@ async def stream_a2a_response(
                 metadata=metadata,
             )
             response = SendStreamingMessageSuccessResponse(id=request_id, result=status_event)
-            yield json.dumps(response.model_dump(exclude_none=True))
+            yield f"event: TaskStatusUpdateEvent\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"

         # Send loop events
         elif isinstance(event, LoopExecutionStartedEvent):
@@ -515,7 +514,7 @@ async def stream_a2a_response(
                 metadata=metadata,
             )
             response = SendStreamingMessageSuccessResponse(id=request_id, result=status_event)
-            yield json.dumps(response.model_dump(exclude_none=True))
+            yield f"event: TaskStatusUpdateEvent\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"

         elif isinstance(event, LoopIterationStartedEvent):
             metadata = {"agno_event_type": "loop_iteration_started"}
@@ -534,7 +533,7 @@ async def stream_a2a_response(
                 metadata=metadata,
             )
             response = SendStreamingMessageSuccessResponse(id=request_id, result=status_event)
-            yield json.dumps(response.model_dump(exclude_none=True))
+            yield f"event: TaskStatusUpdateEvent\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"

         elif isinstance(event, LoopIterationCompletedEvent):
             metadata = {"agno_event_type": "loop_iteration_completed"}
@@ -553,7 +552,7 @@ async def stream_a2a_response(
                 metadata=metadata,
             )
             response = SendStreamingMessageSuccessResponse(id=request_id, result=status_event)
-            yield json.dumps(response.model_dump(exclude_none=True))
+            yield f"event: TaskStatusUpdateEvent\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"

         elif isinstance(event, LoopExecutionCompletedEvent):
             metadata = {"agno_event_type": "loop_execution_completed"}
@@ -570,7 +569,7 @@ async def stream_a2a_response(
                 metadata=metadata,
             )
             response = SendStreamingMessageSuccessResponse(id=request_id, result=status_event)
-            yield json.dumps(response.model_dump(exclude_none=True))
+            yield f"event: TaskStatusUpdateEvent\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"

         # Send parallel events
         elif isinstance(event, ParallelExecutionStartedEvent):
@@ -588,7 +587,7 @@ async def stream_a2a_response(
                 metadata=metadata,
             )
             response = SendStreamingMessageSuccessResponse(id=request_id, result=status_event)
-            yield json.dumps(response.model_dump(exclude_none=True))
+            yield f"event: TaskStatusUpdateEvent\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"

         elif isinstance(event, ParallelExecutionCompletedEvent):
             metadata = {"agno_event_type": "parallel_execution_completed"}
@@ -605,7 +604,7 @@ async def stream_a2a_response(
                 metadata=metadata,
             )
             response = SendStreamingMessageSuccessResponse(id=request_id, result=status_event)
-            yield json.dumps(response.model_dump(exclude_none=True))
+            yield f"event: TaskStatusUpdateEvent\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"

         # Send condition events
         elif isinstance(event, ConditionExecutionStartedEvent):
@@ -623,7 +622,7 @@ async def stream_a2a_response(
                 metadata=metadata,
             )
             response = SendStreamingMessageSuccessResponse(id=request_id, result=status_event)
-            yield json.dumps(response.model_dump(exclude_none=True))
+            yield f"event: TaskStatusUpdateEvent\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"

         elif isinstance(event, ConditionExecutionCompletedEvent):
             metadata = {"agno_event_type": "condition_execution_completed"}
@@ -642,7 +641,7 @@ async def stream_a2a_response(
                 metadata=metadata,
             )
             response = SendStreamingMessageSuccessResponse(id=request_id, result=status_event)
-            yield json.dumps(response.model_dump(exclude_none=True))
+            yield f"event: TaskStatusUpdateEvent\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"

         # Send router events
         elif isinstance(event, RouterExecutionStartedEvent):
@@ -660,7 +659,7 @@ async def stream_a2a_response(
                 metadata=metadata,
             )
             response = SendStreamingMessageSuccessResponse(id=request_id, result=status_event)
-            yield json.dumps(response.model_dump(exclude_none=True))
+            yield f"event: TaskStatusUpdateEvent\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"

         elif isinstance(event, RouterExecutionCompletedEvent):
             metadata = {"agno_event_type": "router_execution_completed"}
@@ -679,7 +678,7 @@ async def stream_a2a_response(
                 metadata=metadata,
             )
             response = SendStreamingMessageSuccessResponse(id=request_id, result=status_event)
-            yield json.dumps(response.model_dump(exclude_none=True))
+            yield f"event: TaskStatusUpdateEvent\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"

         # Send steps events
         elif isinstance(event, StepsExecutionStartedEvent):
@@ -697,7 +696,7 @@ async def stream_a2a_response(
                 metadata=metadata,
            )
             response = SendStreamingMessageSuccessResponse(id=request_id, result=status_event)
-            yield json.dumps(response.model_dump(exclude_none=True))
+            yield f"event: TaskStatusUpdateEvent\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"

         elif isinstance(event, StepsExecutionCompletedEvent):
             metadata = {"agno_event_type": "steps_execution_completed"}
@@ -716,7 +715,7 @@ async def stream_a2a_response(
                 metadata=metadata,
             )
             response = SendStreamingMessageSuccessResponse(id=request_id, result=status_event)
-            yield json.dumps(response.model_dump(exclude_none=True))
+            yield f"event: TaskStatusUpdateEvent\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"

         # Capture completion event for final task construction
         elif isinstance(event, (RunCompletedEvent, TeamRunCompletedEvent, WorkflowCompletedEvent)):
@@ -748,7 +747,7 @@ async def stream_a2a_response(
         final=True,
     )
     response = SendStreamingMessageSuccessResponse(id=request_id, result=final_status_event)
-    yield json.dumps(response.model_dump(exclude_none=True))
+    yield f"event: TaskStatusUpdateEvent\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"

     # 4. Send final task
     # Handle cancelled case
@@ -778,7 +777,7 @@ async def stream_a2a_response(
             history=[final_message],
         )
         response = SendStreamingMessageSuccessResponse(id=request_id, result=task)
-        yield json.dumps(response.model_dump(exclude_none=True))
+        yield f"event: Task\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"
         return

     # Build from completion_event if available, otherwise use accumulated content
@@ -846,8 +845,8 @@ async def stream_a2a_response(

     # Handle all other data as Message metadata
     final_metadata: Dict[str, Any] = {}
-    if hasattr(completion_event, "metrics") and completion_event.metrics:
-        final_metadata["metrics"] = completion_event.metrics.
+    if hasattr(completion_event, "metrics") and completion_event.metrics:  # type: ignore
+        final_metadata["metrics"] = completion_event.metrics.to_dict()  # type: ignore
     if hasattr(completion_event, "metadata") and completion_event.metadata:
         final_metadata.update(completion_event.metadata)

@@ -880,7 +879,7 @@ async def stream_a2a_response(
         artifacts=artifacts if artifacts else None,
     )
     response = SendStreamingMessageSuccessResponse(id=request_id, result=task)
-    yield json.dumps(response.model_dump(exclude_none=True))
+    yield f"event: Task\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"


 async def stream_a2a_response_with_error_handling(
@@ -904,7 +903,7 @@ async def stream_a2a_response_with_error_handling(
             final=True,
         )
         response = SendStreamingMessageSuccessResponse(id=request_id, result=failed_status_event)
-        yield json.dumps(response.model_dump(exclude_none=True))
+        yield f"event: TaskStatusUpdateEvent\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"

         # Send failed Task
         error_message = A2AMessage(
@@ -921,4 +920,4 @@ async def stream_a2a_response_with_error_handling(
         )

         response = SendStreamingMessageSuccessResponse(id=request_id, result=failed_task)
-        yield json.dumps(response.model_dump(exclude_none=True))
+        yield f"event: Task\ndata: {json.dumps(response.model_dump(exclude_none=True))}\n\n"
````