aip-agents-binary 0.5.20__py3-none-manylinux_2_31_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aip_agents/__init__.py +65 -0
- aip_agents/__init__.pyi +19 -0
- aip_agents/a2a/__init__.py +19 -0
- aip_agents/a2a/__init__.pyi +3 -0
- aip_agents/a2a/server/__init__.py +10 -0
- aip_agents/a2a/server/__init__.pyi +4 -0
- aip_agents/a2a/server/base_executor.py +1086 -0
- aip_agents/a2a/server/base_executor.pyi +73 -0
- aip_agents/a2a/server/google_adk_executor.py +198 -0
- aip_agents/a2a/server/google_adk_executor.pyi +51 -0
- aip_agents/a2a/server/langflow_executor.py +180 -0
- aip_agents/a2a/server/langflow_executor.pyi +43 -0
- aip_agents/a2a/server/langgraph_executor.py +270 -0
- aip_agents/a2a/server/langgraph_executor.pyi +47 -0
- aip_agents/a2a/types.py +232 -0
- aip_agents/a2a/types.pyi +132 -0
- aip_agents/agent/__init__.py +27 -0
- aip_agents/agent/__init__.pyi +9 -0
- aip_agents/agent/base_agent.py +970 -0
- aip_agents/agent/base_agent.pyi +221 -0
- aip_agents/agent/base_langgraph_agent.py +2942 -0
- aip_agents/agent/base_langgraph_agent.pyi +232 -0
- aip_agents/agent/google_adk_agent.py +926 -0
- aip_agents/agent/google_adk_agent.pyi +141 -0
- aip_agents/agent/google_adk_constants.py +6 -0
- aip_agents/agent/google_adk_constants.pyi +3 -0
- aip_agents/agent/hitl/__init__.py +24 -0
- aip_agents/agent/hitl/__init__.pyi +6 -0
- aip_agents/agent/hitl/config.py +28 -0
- aip_agents/agent/hitl/config.pyi +15 -0
- aip_agents/agent/hitl/langgraph_hitl_mixin.py +515 -0
- aip_agents/agent/hitl/langgraph_hitl_mixin.pyi +42 -0
- aip_agents/agent/hitl/manager.py +532 -0
- aip_agents/agent/hitl/manager.pyi +200 -0
- aip_agents/agent/hitl/models.py +18 -0
- aip_agents/agent/hitl/models.pyi +3 -0
- aip_agents/agent/hitl/prompt/__init__.py +9 -0
- aip_agents/agent/hitl/prompt/__init__.pyi +4 -0
- aip_agents/agent/hitl/prompt/base.py +42 -0
- aip_agents/agent/hitl/prompt/base.pyi +24 -0
- aip_agents/agent/hitl/prompt/deferred.py +73 -0
- aip_agents/agent/hitl/prompt/deferred.pyi +30 -0
- aip_agents/agent/hitl/registry.py +149 -0
- aip_agents/agent/hitl/registry.pyi +101 -0
- aip_agents/agent/interface.py +138 -0
- aip_agents/agent/interface.pyi +81 -0
- aip_agents/agent/interfaces.py +65 -0
- aip_agents/agent/interfaces.pyi +44 -0
- aip_agents/agent/langflow_agent.py +464 -0
- aip_agents/agent/langflow_agent.pyi +133 -0
- aip_agents/agent/langgraph_memory_enhancer_agent.py +433 -0
- aip_agents/agent/langgraph_memory_enhancer_agent.pyi +49 -0
- aip_agents/agent/langgraph_react_agent.py +2514 -0
- aip_agents/agent/langgraph_react_agent.pyi +126 -0
- aip_agents/agent/system_instruction_context.py +34 -0
- aip_agents/agent/system_instruction_context.pyi +13 -0
- aip_agents/clients/__init__.py +10 -0
- aip_agents/clients/__init__.pyi +4 -0
- aip_agents/clients/langflow/__init__.py +10 -0
- aip_agents/clients/langflow/__init__.pyi +4 -0
- aip_agents/clients/langflow/client.py +477 -0
- aip_agents/clients/langflow/client.pyi +140 -0
- aip_agents/clients/langflow/types.py +18 -0
- aip_agents/clients/langflow/types.pyi +7 -0
- aip_agents/constants.py +23 -0
- aip_agents/constants.pyi +7 -0
- aip_agents/credentials/manager.py +132 -0
- aip_agents/examples/__init__.py +5 -0
- aip_agents/examples/__init__.pyi +0 -0
- aip_agents/examples/compare_streaming_client.py +783 -0
- aip_agents/examples/compare_streaming_client.pyi +48 -0
- aip_agents/examples/compare_streaming_server.py +142 -0
- aip_agents/examples/compare_streaming_server.pyi +18 -0
- aip_agents/examples/demo_memory_recall.py +401 -0
- aip_agents/examples/demo_memory_recall.pyi +58 -0
- aip_agents/examples/hello_world_a2a_google_adk_client.py +49 -0
- aip_agents/examples/hello_world_a2a_google_adk_client.pyi +9 -0
- aip_agents/examples/hello_world_a2a_google_adk_client_agent.py +48 -0
- aip_agents/examples/hello_world_a2a_google_adk_client_agent.pyi +9 -0
- aip_agents/examples/hello_world_a2a_google_adk_client_streaming.py +60 -0
- aip_agents/examples/hello_world_a2a_google_adk_client_streaming.pyi +9 -0
- aip_agents/examples/hello_world_a2a_google_adk_server.py +79 -0
- aip_agents/examples/hello_world_a2a_google_adk_server.pyi +15 -0
- aip_agents/examples/hello_world_a2a_langchain_client.py +39 -0
- aip_agents/examples/hello_world_a2a_langchain_client.pyi +5 -0
- aip_agents/examples/hello_world_a2a_langchain_client_agent.py +39 -0
- aip_agents/examples/hello_world_a2a_langchain_client_agent.pyi +5 -0
- aip_agents/examples/hello_world_a2a_langchain_client_lm_invoker.py +37 -0
- aip_agents/examples/hello_world_a2a_langchain_client_lm_invoker.pyi +5 -0
- aip_agents/examples/hello_world_a2a_langchain_client_streaming.py +41 -0
- aip_agents/examples/hello_world_a2a_langchain_client_streaming.pyi +5 -0
- aip_agents/examples/hello_world_a2a_langchain_reference_client_streaming.py +60 -0
- aip_agents/examples/hello_world_a2a_langchain_reference_client_streaming.pyi +5 -0
- aip_agents/examples/hello_world_a2a_langchain_reference_server.py +105 -0
- aip_agents/examples/hello_world_a2a_langchain_reference_server.pyi +15 -0
- aip_agents/examples/hello_world_a2a_langchain_server.py +79 -0
- aip_agents/examples/hello_world_a2a_langchain_server.pyi +15 -0
- aip_agents/examples/hello_world_a2a_langchain_server_lm_invoker.py +78 -0
- aip_agents/examples/hello_world_a2a_langchain_server_lm_invoker.pyi +15 -0
- aip_agents/examples/hello_world_a2a_langflow_client.py +83 -0
- aip_agents/examples/hello_world_a2a_langflow_client.pyi +9 -0
- aip_agents/examples/hello_world_a2a_langflow_server.py +82 -0
- aip_agents/examples/hello_world_a2a_langflow_server.pyi +14 -0
- aip_agents/examples/hello_world_a2a_langgraph_artifact_client.py +73 -0
- aip_agents/examples/hello_world_a2a_langgraph_artifact_client.pyi +5 -0
- aip_agents/examples/hello_world_a2a_langgraph_artifact_client_streaming.py +76 -0
- aip_agents/examples/hello_world_a2a_langgraph_artifact_client_streaming.pyi +5 -0
- aip_agents/examples/hello_world_a2a_langgraph_artifact_server.py +92 -0
- aip_agents/examples/hello_world_a2a_langgraph_artifact_server.pyi +16 -0
- aip_agents/examples/hello_world_a2a_langgraph_client.py +54 -0
- aip_agents/examples/hello_world_a2a_langgraph_client.pyi +9 -0
- aip_agents/examples/hello_world_a2a_langgraph_client_agent.py +54 -0
- aip_agents/examples/hello_world_a2a_langgraph_client_agent.pyi +9 -0
- aip_agents/examples/hello_world_a2a_langgraph_client_agent_lm_invoker.py +32 -0
- aip_agents/examples/hello_world_a2a_langgraph_client_agent_lm_invoker.pyi +2 -0
- aip_agents/examples/hello_world_a2a_langgraph_client_streaming.py +50 -0
- aip_agents/examples/hello_world_a2a_langgraph_client_streaming.pyi +9 -0
- aip_agents/examples/hello_world_a2a_langgraph_client_streaming_lm_invoker.py +44 -0
- aip_agents/examples/hello_world_a2a_langgraph_client_streaming_lm_invoker.pyi +5 -0
- aip_agents/examples/hello_world_a2a_langgraph_client_streaming_tool_streaming.py +92 -0
- aip_agents/examples/hello_world_a2a_langgraph_client_streaming_tool_streaming.pyi +5 -0
- aip_agents/examples/hello_world_a2a_langgraph_server.py +84 -0
- aip_agents/examples/hello_world_a2a_langgraph_server.pyi +14 -0
- aip_agents/examples/hello_world_a2a_langgraph_server_lm_invoker.py +79 -0
- aip_agents/examples/hello_world_a2a_langgraph_server_lm_invoker.pyi +15 -0
- aip_agents/examples/hello_world_a2a_langgraph_server_tool_streaming.py +132 -0
- aip_agents/examples/hello_world_a2a_langgraph_server_tool_streaming.pyi +15 -0
- aip_agents/examples/hello_world_a2a_mcp_langgraph.py +196 -0
- aip_agents/examples/hello_world_a2a_mcp_langgraph.pyi +48 -0
- aip_agents/examples/hello_world_a2a_three_level_agent_hierarchy_client.py +244 -0
- aip_agents/examples/hello_world_a2a_three_level_agent_hierarchy_client.pyi +48 -0
- aip_agents/examples/hello_world_a2a_three_level_agent_hierarchy_server.py +251 -0
- aip_agents/examples/hello_world_a2a_three_level_agent_hierarchy_server.pyi +45 -0
- aip_agents/examples/hello_world_a2a_with_metadata_langchain_client.py +57 -0
- aip_agents/examples/hello_world_a2a_with_metadata_langchain_client.pyi +5 -0
- aip_agents/examples/hello_world_a2a_with_metadata_langchain_server_lm_invoker.py +80 -0
- aip_agents/examples/hello_world_a2a_with_metadata_langchain_server_lm_invoker.pyi +15 -0
- aip_agents/examples/hello_world_google_adk.py +41 -0
- aip_agents/examples/hello_world_google_adk.pyi +5 -0
- aip_agents/examples/hello_world_google_adk_mcp_http.py +34 -0
- aip_agents/examples/hello_world_google_adk_mcp_http.pyi +5 -0
- aip_agents/examples/hello_world_google_adk_mcp_http_stream.py +40 -0
- aip_agents/examples/hello_world_google_adk_mcp_http_stream.pyi +5 -0
- aip_agents/examples/hello_world_google_adk_mcp_sse.py +44 -0
- aip_agents/examples/hello_world_google_adk_mcp_sse.pyi +5 -0
- aip_agents/examples/hello_world_google_adk_mcp_sse_stream.py +48 -0
- aip_agents/examples/hello_world_google_adk_mcp_sse_stream.pyi +5 -0
- aip_agents/examples/hello_world_google_adk_mcp_stdio.py +44 -0
- aip_agents/examples/hello_world_google_adk_mcp_stdio.pyi +5 -0
- aip_agents/examples/hello_world_google_adk_mcp_stdio_stream.py +48 -0
- aip_agents/examples/hello_world_google_adk_mcp_stdio_stream.pyi +5 -0
- aip_agents/examples/hello_world_google_adk_stream.py +44 -0
- aip_agents/examples/hello_world_google_adk_stream.pyi +5 -0
- aip_agents/examples/hello_world_langchain.py +28 -0
- aip_agents/examples/hello_world_langchain.pyi +5 -0
- aip_agents/examples/hello_world_langchain_lm_invoker.py +15 -0
- aip_agents/examples/hello_world_langchain_lm_invoker.pyi +2 -0
- aip_agents/examples/hello_world_langchain_mcp_http.py +34 -0
- aip_agents/examples/hello_world_langchain_mcp_http.pyi +5 -0
- aip_agents/examples/hello_world_langchain_mcp_http_interactive.py +130 -0
- aip_agents/examples/hello_world_langchain_mcp_http_interactive.pyi +16 -0
- aip_agents/examples/hello_world_langchain_mcp_http_stream.py +42 -0
- aip_agents/examples/hello_world_langchain_mcp_http_stream.pyi +5 -0
- aip_agents/examples/hello_world_langchain_mcp_multi_server.py +155 -0
- aip_agents/examples/hello_world_langchain_mcp_multi_server.pyi +18 -0
- aip_agents/examples/hello_world_langchain_mcp_sse.py +34 -0
- aip_agents/examples/hello_world_langchain_mcp_sse.pyi +5 -0
- aip_agents/examples/hello_world_langchain_mcp_sse_stream.py +40 -0
- aip_agents/examples/hello_world_langchain_mcp_sse_stream.pyi +5 -0
- aip_agents/examples/hello_world_langchain_mcp_stdio.py +30 -0
- aip_agents/examples/hello_world_langchain_mcp_stdio.pyi +5 -0
- aip_agents/examples/hello_world_langchain_mcp_stdio_stream.py +41 -0
- aip_agents/examples/hello_world_langchain_mcp_stdio_stream.pyi +5 -0
- aip_agents/examples/hello_world_langchain_stream.py +36 -0
- aip_agents/examples/hello_world_langchain_stream.pyi +5 -0
- aip_agents/examples/hello_world_langchain_stream_lm_invoker.py +39 -0
- aip_agents/examples/hello_world_langchain_stream_lm_invoker.pyi +5 -0
- aip_agents/examples/hello_world_langflow_agent.py +163 -0
- aip_agents/examples/hello_world_langflow_agent.pyi +35 -0
- aip_agents/examples/hello_world_langgraph.py +39 -0
- aip_agents/examples/hello_world_langgraph.pyi +5 -0
- aip_agents/examples/hello_world_langgraph_bosa_twitter.py +41 -0
- aip_agents/examples/hello_world_langgraph_bosa_twitter.pyi +5 -0
- aip_agents/examples/hello_world_langgraph_mcp_http.py +31 -0
- aip_agents/examples/hello_world_langgraph_mcp_http.pyi +5 -0
- aip_agents/examples/hello_world_langgraph_mcp_http_stream.py +34 -0
- aip_agents/examples/hello_world_langgraph_mcp_http_stream.pyi +5 -0
- aip_agents/examples/hello_world_langgraph_mcp_sse.py +35 -0
- aip_agents/examples/hello_world_langgraph_mcp_sse.pyi +5 -0
- aip_agents/examples/hello_world_langgraph_mcp_sse_stream.py +50 -0
- aip_agents/examples/hello_world_langgraph_mcp_sse_stream.pyi +5 -0
- aip_agents/examples/hello_world_langgraph_mcp_stdio.py +35 -0
- aip_agents/examples/hello_world_langgraph_mcp_stdio.pyi +5 -0
- aip_agents/examples/hello_world_langgraph_mcp_stdio_stream.py +50 -0
- aip_agents/examples/hello_world_langgraph_mcp_stdio_stream.pyi +5 -0
- aip_agents/examples/hello_world_langgraph_stream.py +43 -0
- aip_agents/examples/hello_world_langgraph_stream.pyi +5 -0
- aip_agents/examples/hello_world_langgraph_stream_lm_invoker.py +37 -0
- aip_agents/examples/hello_world_langgraph_stream_lm_invoker.pyi +5 -0
- aip_agents/examples/hello_world_model_switch_cli.py +210 -0
- aip_agents/examples/hello_world_model_switch_cli.pyi +30 -0
- aip_agents/examples/hello_world_multi_agent_adk.py +75 -0
- aip_agents/examples/hello_world_multi_agent_adk.pyi +6 -0
- aip_agents/examples/hello_world_multi_agent_langchain.py +54 -0
- aip_agents/examples/hello_world_multi_agent_langchain.pyi +5 -0
- aip_agents/examples/hello_world_multi_agent_langgraph.py +66 -0
- aip_agents/examples/hello_world_multi_agent_langgraph.pyi +5 -0
- aip_agents/examples/hello_world_multi_agent_langgraph_lm_invoker.py +69 -0
- aip_agents/examples/hello_world_multi_agent_langgraph_lm_invoker.pyi +5 -0
- aip_agents/examples/hello_world_pii_logger.py +21 -0
- aip_agents/examples/hello_world_pii_logger.pyi +5 -0
- aip_agents/examples/hello_world_sentry.py +133 -0
- aip_agents/examples/hello_world_sentry.pyi +21 -0
- aip_agents/examples/hello_world_step_limits.py +273 -0
- aip_agents/examples/hello_world_step_limits.pyi +17 -0
- aip_agents/examples/hello_world_stock_a2a_server.py +103 -0
- aip_agents/examples/hello_world_stock_a2a_server.pyi +17 -0
- aip_agents/examples/hello_world_tool_output_client.py +46 -0
- aip_agents/examples/hello_world_tool_output_client.pyi +5 -0
- aip_agents/examples/hello_world_tool_output_server.py +114 -0
- aip_agents/examples/hello_world_tool_output_server.pyi +19 -0
- aip_agents/examples/hitl_demo.py +724 -0
- aip_agents/examples/hitl_demo.pyi +67 -0
- aip_agents/examples/mcp_configs/configs.py +63 -0
- aip_agents/examples/mcp_servers/common.py +76 -0
- aip_agents/examples/mcp_servers/mcp_name.py +29 -0
- aip_agents/examples/mcp_servers/mcp_server_http.py +19 -0
- aip_agents/examples/mcp_servers/mcp_server_sse.py +19 -0
- aip_agents/examples/mcp_servers/mcp_server_stdio.py +19 -0
- aip_agents/examples/mcp_servers/mcp_time.py +10 -0
- aip_agents/examples/pii_demo_langgraph_client.py +69 -0
- aip_agents/examples/pii_demo_langgraph_client.pyi +5 -0
- aip_agents/examples/pii_demo_langgraph_server.py +126 -0
- aip_agents/examples/pii_demo_langgraph_server.pyi +20 -0
- aip_agents/examples/pii_demo_multi_agent_client.py +80 -0
- aip_agents/examples/pii_demo_multi_agent_client.pyi +5 -0
- aip_agents/examples/pii_demo_multi_agent_server.py +247 -0
- aip_agents/examples/pii_demo_multi_agent_server.pyi +40 -0
- aip_agents/examples/todolist_planning_a2a_langchain_client.py +70 -0
- aip_agents/examples/todolist_planning_a2a_langchain_client.pyi +5 -0
- aip_agents/examples/todolist_planning_a2a_langgraph_server.py +88 -0
- aip_agents/examples/todolist_planning_a2a_langgraph_server.pyi +19 -0
- aip_agents/examples/tools/__init__.py +27 -0
- aip_agents/examples/tools/__init__.pyi +9 -0
- aip_agents/examples/tools/adk_arithmetic_tools.py +36 -0
- aip_agents/examples/tools/adk_arithmetic_tools.pyi +24 -0
- aip_agents/examples/tools/adk_weather_tool.py +60 -0
- aip_agents/examples/tools/adk_weather_tool.pyi +18 -0
- aip_agents/examples/tools/data_generator_tool.py +103 -0
- aip_agents/examples/tools/data_generator_tool.pyi +15 -0
- aip_agents/examples/tools/data_visualization_tool.py +312 -0
- aip_agents/examples/tools/data_visualization_tool.pyi +19 -0
- aip_agents/examples/tools/image_artifact_tool.py +136 -0
- aip_agents/examples/tools/image_artifact_tool.pyi +26 -0
- aip_agents/examples/tools/langchain_arithmetic_tools.py +26 -0
- aip_agents/examples/tools/langchain_arithmetic_tools.pyi +17 -0
- aip_agents/examples/tools/langchain_currency_exchange_tool.py +88 -0
- aip_agents/examples/tools/langchain_currency_exchange_tool.pyi +20 -0
- aip_agents/examples/tools/langchain_graph_artifact_tool.py +172 -0
- aip_agents/examples/tools/langchain_graph_artifact_tool.pyi +25 -0
- aip_agents/examples/tools/langchain_weather_tool.py +48 -0
- aip_agents/examples/tools/langchain_weather_tool.pyi +19 -0
- aip_agents/examples/tools/langgraph_streaming_tool.py +130 -0
- aip_agents/examples/tools/langgraph_streaming_tool.pyi +43 -0
- aip_agents/examples/tools/mock_retrieval_tool.py +56 -0
- aip_agents/examples/tools/mock_retrieval_tool.pyi +13 -0
- aip_agents/examples/tools/pii_demo_tools.py +189 -0
- aip_agents/examples/tools/pii_demo_tools.pyi +54 -0
- aip_agents/examples/tools/random_chart_tool.py +142 -0
- aip_agents/examples/tools/random_chart_tool.pyi +20 -0
- aip_agents/examples/tools/serper_tool.py +202 -0
- aip_agents/examples/tools/serper_tool.pyi +16 -0
- aip_agents/examples/tools/stock_tools.py +82 -0
- aip_agents/examples/tools/stock_tools.pyi +36 -0
- aip_agents/examples/tools/table_generator_tool.py +167 -0
- aip_agents/examples/tools/table_generator_tool.pyi +22 -0
- aip_agents/examples/tools/time_tool.py +82 -0
- aip_agents/examples/tools/time_tool.pyi +15 -0
- aip_agents/examples/tools/weather_forecast_tool.py +38 -0
- aip_agents/examples/tools/weather_forecast_tool.pyi +14 -0
- aip_agents/executor/agent_executor.py +473 -0
- aip_agents/executor/base.py +48 -0
- aip_agents/mcp/__init__.py +1 -0
- aip_agents/mcp/__init__.pyi +0 -0
- aip_agents/mcp/client/__init__.py +14 -0
- aip_agents/mcp/client/__init__.pyi +5 -0
- aip_agents/mcp/client/base_mcp_client.py +369 -0
- aip_agents/mcp/client/base_mcp_client.pyi +148 -0
- aip_agents/mcp/client/connection_manager.py +193 -0
- aip_agents/mcp/client/connection_manager.pyi +48 -0
- aip_agents/mcp/client/google_adk/__init__.py +11 -0
- aip_agents/mcp/client/google_adk/__init__.pyi +3 -0
- aip_agents/mcp/client/google_adk/client.py +381 -0
- aip_agents/mcp/client/google_adk/client.pyi +75 -0
- aip_agents/mcp/client/langchain/__init__.py +11 -0
- aip_agents/mcp/client/langchain/__init__.pyi +3 -0
- aip_agents/mcp/client/langchain/client.py +265 -0
- aip_agents/mcp/client/langchain/client.pyi +48 -0
- aip_agents/mcp/client/persistent_session.py +359 -0
- aip_agents/mcp/client/persistent_session.pyi +113 -0
- aip_agents/mcp/client/session_pool.py +351 -0
- aip_agents/mcp/client/session_pool.pyi +101 -0
- aip_agents/mcp/client/transports.py +215 -0
- aip_agents/mcp/client/transports.pyi +123 -0
- aip_agents/mcp/utils/__init__.py +7 -0
- aip_agents/mcp/utils/__init__.pyi +0 -0
- aip_agents/mcp/utils/config_validator.py +139 -0
- aip_agents/mcp/utils/config_validator.pyi +82 -0
- aip_agents/memory/__init__.py +14 -0
- aip_agents/memory/__init__.pyi +5 -0
- aip_agents/memory/adapters/__init__.py +10 -0
- aip_agents/memory/adapters/__init__.pyi +4 -0
- aip_agents/memory/adapters/base_adapter.py +717 -0
- aip_agents/memory/adapters/base_adapter.pyi +150 -0
- aip_agents/memory/adapters/mem0.py +84 -0
- aip_agents/memory/adapters/mem0.pyi +22 -0
- aip_agents/memory/base.py +84 -0
- aip_agents/memory/base.pyi +60 -0
- aip_agents/memory/constants.py +49 -0
- aip_agents/memory/constants.pyi +25 -0
- aip_agents/memory/factory.py +86 -0
- aip_agents/memory/factory.pyi +24 -0
- aip_agents/memory/guidance.py +20 -0
- aip_agents/memory/guidance.pyi +3 -0
- aip_agents/memory/simple_memory.py +47 -0
- aip_agents/memory/simple_memory.pyi +23 -0
- aip_agents/middleware/__init__.py +17 -0
- aip_agents/middleware/__init__.pyi +5 -0
- aip_agents/middleware/base.py +88 -0
- aip_agents/middleware/base.pyi +71 -0
- aip_agents/middleware/manager.py +128 -0
- aip_agents/middleware/manager.pyi +80 -0
- aip_agents/middleware/todolist.py +274 -0
- aip_agents/middleware/todolist.pyi +125 -0
- aip_agents/schema/__init__.py +69 -0
- aip_agents/schema/__init__.pyi +9 -0
- aip_agents/schema/a2a.py +56 -0
- aip_agents/schema/a2a.pyi +40 -0
- aip_agents/schema/agent.py +111 -0
- aip_agents/schema/agent.pyi +65 -0
- aip_agents/schema/hitl.py +157 -0
- aip_agents/schema/hitl.pyi +89 -0
- aip_agents/schema/langgraph.py +37 -0
- aip_agents/schema/langgraph.pyi +28 -0
- aip_agents/schema/model_id.py +97 -0
- aip_agents/schema/model_id.pyi +54 -0
- aip_agents/schema/step_limit.py +108 -0
- aip_agents/schema/step_limit.pyi +63 -0
- aip_agents/schema/storage.py +40 -0
- aip_agents/schema/storage.pyi +21 -0
- aip_agents/sentry/__init__.py +11 -0
- aip_agents/sentry/__init__.pyi +3 -0
- aip_agents/sentry/sentry.py +151 -0
- aip_agents/sentry/sentry.pyi +48 -0
- aip_agents/storage/__init__.py +41 -0
- aip_agents/storage/__init__.pyi +8 -0
- aip_agents/storage/base.py +85 -0
- aip_agents/storage/base.pyi +58 -0
- aip_agents/storage/clients/__init__.py +12 -0
- aip_agents/storage/clients/__init__.pyi +3 -0
- aip_agents/storage/clients/minio_client.py +318 -0
- aip_agents/storage/clients/minio_client.pyi +137 -0
- aip_agents/storage/config.py +62 -0
- aip_agents/storage/config.pyi +29 -0
- aip_agents/storage/providers/__init__.py +15 -0
- aip_agents/storage/providers/__init__.pyi +5 -0
- aip_agents/storage/providers/base.py +106 -0
- aip_agents/storage/providers/base.pyi +88 -0
- aip_agents/storage/providers/memory.py +114 -0
- aip_agents/storage/providers/memory.pyi +79 -0
- aip_agents/storage/providers/object_storage.py +214 -0
- aip_agents/storage/providers/object_storage.pyi +98 -0
- aip_agents/tools/__init__.py +33 -0
- aip_agents/tools/__init__.pyi +13 -0
- aip_agents/tools/bosa_tools.py +105 -0
- aip_agents/tools/bosa_tools.pyi +37 -0
- aip_agents/tools/browser_use/__init__.py +82 -0
- aip_agents/tools/browser_use/__init__.pyi +14 -0
- aip_agents/tools/browser_use/action_parser.py +103 -0
- aip_agents/tools/browser_use/action_parser.pyi +18 -0
- aip_agents/tools/browser_use/browser_use_tool.py +1112 -0
- aip_agents/tools/browser_use/browser_use_tool.pyi +50 -0
- aip_agents/tools/browser_use/llm_config.py +120 -0
- aip_agents/tools/browser_use/llm_config.pyi +52 -0
- aip_agents/tools/browser_use/minio_storage.py +198 -0
- aip_agents/tools/browser_use/minio_storage.pyi +109 -0
- aip_agents/tools/browser_use/schemas.py +119 -0
- aip_agents/tools/browser_use/schemas.pyi +32 -0
- aip_agents/tools/browser_use/session.py +76 -0
- aip_agents/tools/browser_use/session.pyi +4 -0
- aip_agents/tools/browser_use/session_errors.py +132 -0
- aip_agents/tools/browser_use/session_errors.pyi +53 -0
- aip_agents/tools/browser_use/steel_session_recording.py +317 -0
- aip_agents/tools/browser_use/steel_session_recording.pyi +63 -0
- aip_agents/tools/browser_use/streaming.py +813 -0
- aip_agents/tools/browser_use/streaming.pyi +81 -0
- aip_agents/tools/browser_use/structured_data_parser.py +257 -0
- aip_agents/tools/browser_use/structured_data_parser.pyi +86 -0
- aip_agents/tools/browser_use/structured_data_recovery.py +204 -0
- aip_agents/tools/browser_use/structured_data_recovery.pyi +43 -0
- aip_agents/tools/browser_use/types.py +78 -0
- aip_agents/tools/browser_use/types.pyi +45 -0
- aip_agents/tools/code_sandbox/__init__.py +26 -0
- aip_agents/tools/code_sandbox/__init__.pyi +3 -0
- aip_agents/tools/code_sandbox/constant.py +13 -0
- aip_agents/tools/code_sandbox/constant.pyi +4 -0
- aip_agents/tools/code_sandbox/e2b_cloud_sandbox_extended.py +257 -0
- aip_agents/tools/code_sandbox/e2b_cloud_sandbox_extended.pyi +86 -0
- aip_agents/tools/code_sandbox/e2b_sandbox_tool.py +411 -0
- aip_agents/tools/code_sandbox/e2b_sandbox_tool.pyi +29 -0
- aip_agents/tools/constants.py +165 -0
- aip_agents/tools/constants.pyi +135 -0
- aip_agents/tools/document_loader/__init__.py +44 -0
- aip_agents/tools/document_loader/__init__.pyi +7 -0
- aip_agents/tools/document_loader/base_reader.py +302 -0
- aip_agents/tools/document_loader/base_reader.pyi +75 -0
- aip_agents/tools/document_loader/docx_reader_tool.py +68 -0
- aip_agents/tools/document_loader/docx_reader_tool.pyi +10 -0
- aip_agents/tools/document_loader/excel_reader_tool.py +171 -0
- aip_agents/tools/document_loader/excel_reader_tool.pyi +26 -0
- aip_agents/tools/document_loader/pdf_reader_tool.py +79 -0
- aip_agents/tools/document_loader/pdf_reader_tool.pyi +11 -0
- aip_agents/tools/document_loader/pdf_splitter.py +169 -0
- aip_agents/tools/document_loader/pdf_splitter.pyi +18 -0
- aip_agents/tools/gl_connector/__init__.py +5 -0
- aip_agents/tools/gl_connector/__init__.pyi +3 -0
- aip_agents/tools/gl_connector/tool.py +351 -0
- aip_agents/tools/gl_connector/tool.pyi +74 -0
- aip_agents/tools/memory_search/__init__.py +22 -0
- aip_agents/tools/memory_search/__init__.pyi +5 -0
- aip_agents/tools/memory_search/base.py +200 -0
- aip_agents/tools/memory_search/base.pyi +69 -0
- aip_agents/tools/memory_search/mem0.py +258 -0
- aip_agents/tools/memory_search/mem0.pyi +19 -0
- aip_agents/tools/memory_search/schema.py +48 -0
- aip_agents/tools/memory_search/schema.pyi +15 -0
- aip_agents/tools/memory_search_tool.py +26 -0
- aip_agents/tools/memory_search_tool.pyi +3 -0
- aip_agents/tools/time_tool.py +117 -0
- aip_agents/tools/time_tool.pyi +16 -0
- aip_agents/tools/tool_config_injector.py +300 -0
- aip_agents/tools/tool_config_injector.pyi +26 -0
- aip_agents/tools/web_search/__init__.py +15 -0
- aip_agents/tools/web_search/__init__.pyi +3 -0
- aip_agents/tools/web_search/serper_tool.py +187 -0
- aip_agents/tools/web_search/serper_tool.pyi +19 -0
- aip_agents/types/__init__.py +70 -0
- aip_agents/types/__init__.pyi +36 -0
- aip_agents/types/a2a_events.py +13 -0
- aip_agents/types/a2a_events.pyi +3 -0
- aip_agents/utils/__init__.py +79 -0
- aip_agents/utils/__init__.pyi +11 -0
- aip_agents/utils/a2a_connector.py +1757 -0
- aip_agents/utils/a2a_connector.pyi +146 -0
- aip_agents/utils/artifact_helpers.py +502 -0
- aip_agents/utils/artifact_helpers.pyi +203 -0
- aip_agents/utils/constants.py +22 -0
- aip_agents/utils/constants.pyi +10 -0
- aip_agents/utils/datetime/__init__.py +34 -0
- aip_agents/utils/datetime/__init__.pyi +4 -0
- aip_agents/utils/datetime/normalization.py +231 -0
- aip_agents/utils/datetime/normalization.pyi +95 -0
- aip_agents/utils/datetime/timezone.py +206 -0
- aip_agents/utils/datetime/timezone.pyi +48 -0
- aip_agents/utils/env_loader.py +27 -0
- aip_agents/utils/env_loader.pyi +10 -0
- aip_agents/utils/event_handler_registry.py +58 -0
- aip_agents/utils/event_handler_registry.pyi +23 -0
- aip_agents/utils/file_prompt_utils.py +176 -0
- aip_agents/utils/file_prompt_utils.pyi +21 -0
- aip_agents/utils/final_response_builder.py +211 -0
- aip_agents/utils/final_response_builder.pyi +34 -0
- aip_agents/utils/formatter_llm_client.py +231 -0
- aip_agents/utils/formatter_llm_client.pyi +71 -0
- aip_agents/utils/langgraph/__init__.py +19 -0
- aip_agents/utils/langgraph/__init__.pyi +3 -0
- aip_agents/utils/langgraph/converter.py +128 -0
- aip_agents/utils/langgraph/converter.pyi +49 -0
- aip_agents/utils/langgraph/tool_managers/__init__.py +15 -0
- aip_agents/utils/langgraph/tool_managers/__init__.pyi +5 -0
- aip_agents/utils/langgraph/tool_managers/a2a_tool_manager.py +99 -0
- aip_agents/utils/langgraph/tool_managers/a2a_tool_manager.pyi +35 -0
- aip_agents/utils/langgraph/tool_managers/base_tool_manager.py +66 -0
- aip_agents/utils/langgraph/tool_managers/base_tool_manager.pyi +48 -0
- aip_agents/utils/langgraph/tool_managers/delegation_tool_manager.py +1071 -0
- aip_agents/utils/langgraph/tool_managers/delegation_tool_manager.pyi +56 -0
- aip_agents/utils/langgraph/tool_output_management.py +967 -0
- aip_agents/utils/langgraph/tool_output_management.pyi +292 -0
- aip_agents/utils/logger.py +195 -0
- aip_agents/utils/logger.pyi +60 -0
- aip_agents/utils/metadata/__init__.py +27 -0
- aip_agents/utils/metadata/__init__.pyi +5 -0
- aip_agents/utils/metadata/activity_metadata_helper.py +407 -0
- aip_agents/utils/metadata/activity_metadata_helper.pyi +25 -0
- aip_agents/utils/metadata/activity_narrative/__init__.py +35 -0
- aip_agents/utils/metadata/activity_narrative/__init__.pyi +7 -0
- aip_agents/utils/metadata/activity_narrative/builder.py +817 -0
- aip_agents/utils/metadata/activity_narrative/builder.pyi +35 -0
- aip_agents/utils/metadata/activity_narrative/constants.py +51 -0
- aip_agents/utils/metadata/activity_narrative/constants.pyi +10 -0
- aip_agents/utils/metadata/activity_narrative/context.py +49 -0
- aip_agents/utils/metadata/activity_narrative/context.pyi +32 -0
- aip_agents/utils/metadata/activity_narrative/formatters.py +230 -0
- aip_agents/utils/metadata/activity_narrative/formatters.pyi +48 -0
- aip_agents/utils/metadata/activity_narrative/utils.py +35 -0
- aip_agents/utils/metadata/activity_narrative/utils.pyi +12 -0
- aip_agents/utils/metadata/schemas/__init__.py +16 -0
- aip_agents/utils/metadata/schemas/__init__.pyi +4 -0
- aip_agents/utils/metadata/schemas/activity_schema.py +29 -0
- aip_agents/utils/metadata/schemas/activity_schema.pyi +18 -0
- aip_agents/utils/metadata/schemas/thinking_schema.py +31 -0
- aip_agents/utils/metadata/schemas/thinking_schema.pyi +20 -0
- aip_agents/utils/metadata/thinking_metadata_helper.py +38 -0
- aip_agents/utils/metadata/thinking_metadata_helper.pyi +4 -0
- aip_agents/utils/metadata_helper.py +358 -0
- aip_agents/utils/metadata_helper.pyi +117 -0
- aip_agents/utils/name_preprocessor/__init__.py +17 -0
- aip_agents/utils/name_preprocessor/__init__.pyi +6 -0
- aip_agents/utils/name_preprocessor/base_name_preprocessor.py +73 -0
- aip_agents/utils/name_preprocessor/base_name_preprocessor.pyi +52 -0
- aip_agents/utils/name_preprocessor/google_name_preprocessor.py +100 -0
- aip_agents/utils/name_preprocessor/google_name_preprocessor.pyi +38 -0
- aip_agents/utils/name_preprocessor/name_preprocessor.py +87 -0
- aip_agents/utils/name_preprocessor/name_preprocessor.pyi +41 -0
- aip_agents/utils/name_preprocessor/openai_name_preprocessor.py +48 -0
- aip_agents/utils/name_preprocessor/openai_name_preprocessor.pyi +34 -0
- aip_agents/utils/pii/__init__.py +25 -0
- aip_agents/utils/pii/__init__.pyi +5 -0
- aip_agents/utils/pii/pii_handler.py +397 -0
- aip_agents/utils/pii/pii_handler.pyi +96 -0
- aip_agents/utils/pii/pii_helper.py +207 -0
- aip_agents/utils/pii/pii_helper.pyi +78 -0
- aip_agents/utils/pii/uuid_deanonymizer_mapping.py +195 -0
- aip_agents/utils/pii/uuid_deanonymizer_mapping.pyi +73 -0
- aip_agents/utils/reference_helper.py +273 -0
- aip_agents/utils/reference_helper.pyi +81 -0
- aip_agents/utils/sse_chunk_transformer.py +831 -0
- aip_agents/utils/sse_chunk_transformer.pyi +166 -0
- aip_agents/utils/step_limit_manager.py +265 -0
- aip_agents/utils/step_limit_manager.pyi +112 -0
- aip_agents/utils/token_usage_helper.py +156 -0
- aip_agents/utils/token_usage_helper.pyi +60 -0
- aip_agents_binary-0.5.20.dist-info/METADATA +681 -0
- aip_agents_binary-0.5.20.dist-info/RECORD +546 -0
- aip_agents_binary-0.5.20.dist-info/WHEEL +5 -0
- aip_agents_binary-0.5.20.dist-info/top_level.txt +1 -0
|
@@ -0,0 +1,1086 @@
|
|
|
1
|
+
"""Defines the base abstract class for A2A server-side executors.
|
|
2
|
+
|
|
3
|
+
Authors:
|
|
4
|
+
Christian Trisno Sen Long Chen (christian.t.s.l.chen@gdplabs.id)
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import asyncio
|
|
8
|
+
import time
|
|
9
|
+
import uuid
|
|
10
|
+
from abc import ABC, abstractmethod
|
|
11
|
+
from collections.abc import Awaitable, Callable
|
|
12
|
+
from dataclasses import dataclass
|
|
13
|
+
from datetime import UTC, datetime
|
|
14
|
+
from typing import Any
|
|
15
|
+
|
|
16
|
+
from a2a.server.agent_execution import AgentExecutor as A2ASDKExecutor
|
|
17
|
+
from a2a.server.agent_execution import RequestContext
|
|
18
|
+
from a2a.server.events.event_queue import EventQueue
|
|
19
|
+
from a2a.server.tasks import TaskUpdater
|
|
20
|
+
from a2a.types import (
|
|
21
|
+
FilePart,
|
|
22
|
+
FileWithBytes,
|
|
23
|
+
Message,
|
|
24
|
+
Part,
|
|
25
|
+
Role,
|
|
26
|
+
TaskArtifactUpdateEvent,
|
|
27
|
+
TaskState,
|
|
28
|
+
TaskStatus,
|
|
29
|
+
TaskStatusUpdateEvent,
|
|
30
|
+
TextPart,
|
|
31
|
+
)
|
|
32
|
+
from a2a.utils import get_text_parts, new_agent_text_message, new_text_artifact
|
|
33
|
+
from a2a.utils.artifact import new_artifact
|
|
34
|
+
|
|
35
|
+
from aip_agents.types import A2AEvent, A2AStreamEventType
|
|
36
|
+
from aip_agents.utils import serialize_references_for_metadata
|
|
37
|
+
from aip_agents.utils.artifact_helpers import ArtifactHandler
|
|
38
|
+
from aip_agents.utils.logger import get_logger
|
|
39
|
+
from aip_agents.utils.metadata_helper import MetadataFieldKeys
|
|
40
|
+
|
|
41
|
+
logger = get_logger(__name__)
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
@dataclass
class StatusUpdateParams:
    """Parameters for status updates.

    Bundles the optional knobs for `BaseA2AExecutor._update_status` so the
    method signature stays small.
    """

    # Event-level metadata attached to the TaskStatusUpdateEvent (per A2A spec,
    # metadata belongs on the event envelope, not inside the message).
    metadata: dict[str, Any] | None = None
    # Whether this status update is the final event for the task.
    final: bool = False
    # Explicit task ID override; when None, falls back to the message's taskId.
    task_id: str | None = None
    # Explicit context ID override; when None, falls back to the message's contextId.
    context_id: str | None = None
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
class BaseA2AExecutor(A2ASDKExecutor, ABC):
|
|
55
|
+
"""Abstract base class for GLLM Agent framework's A2A server-side executors.
|
|
56
|
+
|
|
57
|
+
This class extends the A2A SDK's `AgentExecutor`. It serves as a common
|
|
58
|
+
foundation for specific executors tailored to different agent types within the
|
|
59
|
+
`aip-agents` framework, such as `LangGraphA2AExecutor` or
|
|
60
|
+
`GoogleADKA2AExecutor`.
|
|
61
|
+
|
|
62
|
+
Subclasses are required to implement the `execute` method to handle A2A
|
|
63
|
+
requests. The `cancel` method has a common implementation.
|
|
64
|
+
|
|
65
|
+
Attributes:
|
|
66
|
+
_active_tasks (dict[str, asyncio.Task]): A dictionary mapping task IDs to
|
|
67
|
+
their corresponding asyncio.Task instances for active agent executions.
|
|
68
|
+
"""
|
|
69
|
+
|
|
70
|
+
    def __init__(self) -> None:
        """Initializes the BaseA2AExecutor.

        Sets up the per-task bookkeeping dictionaries. All of them are keyed
        by the A2A task ID and are cleaned up together in
        `_remove_active_task`.
        """
        # task_id -> asyncio.Task, so in-flight executions can be cancelled.
        self._active_tasks: dict[str, asyncio.Task] = {}
        self._processed_artifacts: dict[str, set[str]] = {}  # task_id -> set of artifact hashes
        self._streaming_artifacts: dict[str, bool] = {}  # task_id -> has_streaming_content
        self._streaming_artifact_ids: dict[str, str] = {}  # task_id -> artifact_id for consistent streaming
        # Track cumulative time per task (monotonic seconds) for metadata.time
        self._task_start_times: dict[str, float] = {}
|
|
78
|
+
|
|
79
|
+
def _remove_active_task(self, task_id: str) -> None:
|
|
80
|
+
"""Removes an active task from the internal tracking dictionary.
|
|
81
|
+
|
|
82
|
+
Args:
|
|
83
|
+
task_id (str): The ID of the task to remove.
|
|
84
|
+
"""
|
|
85
|
+
if task_id in self._active_tasks:
|
|
86
|
+
self._active_tasks.pop(task_id)
|
|
87
|
+
# Clean up processed artifacts for this task
|
|
88
|
+
if task_id in self._processed_artifacts:
|
|
89
|
+
self._processed_artifacts.pop(task_id)
|
|
90
|
+
# Clean up streaming artifacts tracking
|
|
91
|
+
if task_id in self._streaming_artifacts:
|
|
92
|
+
self._streaming_artifacts.pop(task_id)
|
|
93
|
+
# Clean up streaming artifact IDs
|
|
94
|
+
if task_id in self._streaming_artifact_ids:
|
|
95
|
+
self._streaming_artifact_ids.pop(task_id)
|
|
96
|
+
# Clean up task start time tracking
|
|
97
|
+
if task_id in self._task_start_times:
|
|
98
|
+
self._task_start_times.pop(task_id)
|
|
99
|
+
|
|
100
|
+
def _apply_cumulative_time(self, task_id: str, metadata: dict[str, Any] | None) -> None:
|
|
101
|
+
"""Ensure metadata.time is cumulative since first status event for the task.
|
|
102
|
+
|
|
103
|
+
Args:
|
|
104
|
+
task_id: The A2A task ID.
|
|
105
|
+
metadata: The metadata dict to mutate.
|
|
106
|
+
"""
|
|
107
|
+
if metadata is None:
|
|
108
|
+
return
|
|
109
|
+
now = time.monotonic()
|
|
110
|
+
start = self._task_start_times.get(task_id)
|
|
111
|
+
if start is None:
|
|
112
|
+
self._task_start_times[task_id] = now
|
|
113
|
+
elapsed = 0.0
|
|
114
|
+
else:
|
|
115
|
+
elapsed = max(0.0, now - start)
|
|
116
|
+
# Always use string keys in metadata to ensure JSON-serializable output
|
|
117
|
+
metadata[MetadataFieldKeys.TIME] = elapsed
|
|
118
|
+
|
|
119
|
+
async def _handle_initial_execute_checks(
|
|
120
|
+
self, context: RequestContext, event_queue: EventQueue
|
|
121
|
+
) -> tuple[TaskUpdater | None, str | None, dict[str, Any] | None]:
|
|
122
|
+
"""Performs initial validation and setup for an incoming agent request.
|
|
123
|
+
|
|
124
|
+
This method checks for the presence of message content, extracts the query and metadata,
|
|
125
|
+
and initializes the task status with the A2A server.
|
|
126
|
+
|
|
127
|
+
Args:
|
|
128
|
+
context (RequestContext): The A2A request context, containing message
|
|
129
|
+
details and task information.
|
|
130
|
+
event_queue (EventQueue): The queue for sending task status updates back
|
|
131
|
+
to the A2A server.
|
|
132
|
+
|
|
133
|
+
Returns:
|
|
134
|
+
tuple[TaskUpdater | None, str | None, dict[str, typing.Any] | None]: A tuple containing a `TaskUpdater`
|
|
135
|
+
instance, the extracted query string, and extracted metadata dictionary if initial checks pass. If checks
|
|
136
|
+
fail (e.g., no message content), it returns (None, None, None) and will have
|
|
137
|
+
already sent a failure status through the updater.
|
|
138
|
+
"""
|
|
139
|
+
if not context.message or not context.message.parts:
|
|
140
|
+
updater = TaskUpdater(event_queue, context.task_id, context.context_id)
|
|
141
|
+
await updater.failed(message=new_agent_text_message("No message content provided."))
|
|
142
|
+
return None, None, None
|
|
143
|
+
|
|
144
|
+
query_parts = get_text_parts(context.message.parts)
|
|
145
|
+
query = "\n".join(query_parts)
|
|
146
|
+
|
|
147
|
+
if not query:
|
|
148
|
+
updater = TaskUpdater(event_queue, context.task_id, context.context_id)
|
|
149
|
+
await updater.failed(message=new_agent_text_message("Extracted query is empty."))
|
|
150
|
+
return None, None, None
|
|
151
|
+
|
|
152
|
+
# Extract metadata from both message and request params
|
|
153
|
+
metadata = self._extract_metadata(context)
|
|
154
|
+
|
|
155
|
+
updater = TaskUpdater(event_queue, context.task_id, context.context_id)
|
|
156
|
+
if not context.current_task:
|
|
157
|
+
await updater.submit()
|
|
158
|
+
await updater.start_work()
|
|
159
|
+
return updater, query, metadata
|
|
160
|
+
|
|
161
|
+
def _extract_metadata(self, context: RequestContext) -> dict[str, Any]:
|
|
162
|
+
"""Extracts metadata from the request context.
|
|
163
|
+
|
|
164
|
+
This method combines metadata from both the message and the request parameters
|
|
165
|
+
to provide a comprehensive metadata dictionary for the agent.
|
|
166
|
+
|
|
167
|
+
Args:
|
|
168
|
+
context (RequestContext): The A2A request context containing message and params.
|
|
169
|
+
|
|
170
|
+
Returns:
|
|
171
|
+
dict[str, Any]: A dictionary containing all available metadata.
|
|
172
|
+
"""
|
|
173
|
+
metadata = {}
|
|
174
|
+
|
|
175
|
+
if context._params and context._params.metadata:
|
|
176
|
+
metadata.update(context._params.metadata)
|
|
177
|
+
|
|
178
|
+
logger.debug(f"Final extracted metadata: {metadata}")
|
|
179
|
+
return metadata
|
|
180
|
+
|
|
181
|
+
    async def _update_status(
        self,
        updater: TaskUpdater,
        state: TaskState,
        message: Message,
        params: StatusUpdateParams | None = None,
    ) -> None:
        """Update task status with metadata placed in TaskStatusUpdateEvent according to A2A spec.

        This method creates a TaskStatusUpdateEvent with metadata in the correct location
        (the event's metadata field) rather than in the message metadata field.

        Args:
            updater (TaskUpdater): The TaskUpdater instance for sending status updates.
            state (TaskState): The new task state.
            message (Message): The message associated with the status update.
            params (StatusUpdateParams | None): Parameters for the status update.

        Raises:
            ValueError: If neither `params` nor `message` can supply both a
                task ID and a context ID.
        """
        # Timestamp is captured up front so it reflects when the update was
        # requested, not when it was finally enqueued.
        current_timestamp = datetime.now(UTC).isoformat()

        # Use defaults if params not provided
        if params is None:
            params = StatusUpdateParams()

        # Use provided task_id and context_id, or extract from message/updater
        task_id = params.task_id
        if task_id is None:
            task_id = message.taskId
        context_id = params.context_id
        if context_id is None:
            context_id = message.contextId

        # Ensure we have valid IDs
        if task_id is None or context_id is None:
            raise ValueError("task_id and context_id must be provided or available in the message")

        # Ensure metadata exists and apply cumulative time for this task.
        # Timing failures are logged but never block the status update itself.
        metadata = params.metadata or {}
        try:
            self._apply_cumulative_time(task_id, metadata)
        except Exception as e:
            logger.warning(f"Failed to apply cumulative time for task {task_id}: {e}")

        event = TaskStatusUpdateEvent(
            taskId=task_id,
            contextId=context_id,
            final=params.final,
            status=TaskStatus(
                state=state,
                message=message,
                timestamp=current_timestamp,
            ),
            metadata=metadata,
        )

        # Use status-specific enqueue to ensure servers treat it as a status event
        try:
            await updater.event_queue.enqueue_status(event)
        except AttributeError:
            # Fallback for older SDKs without enqueue_status
            await updater.event_queue.enqueue_event(event)
|
|
242
|
+
|
|
243
|
+
async def _execute_agent_processing(
|
|
244
|
+
self,
|
|
245
|
+
agent_processing_coro: Awaitable[None],
|
|
246
|
+
updater: TaskUpdater,
|
|
247
|
+
task_id: str,
|
|
248
|
+
context_id: str | None = None,
|
|
249
|
+
) -> None:
|
|
250
|
+
"""Manages the execution lifecycle of an agent processing coroutine.
|
|
251
|
+
|
|
252
|
+
This method creates an asyncio task for the provided agent processing
|
|
253
|
+
coroutine, stores it for potential cancellation, and awaits its completion.
|
|
254
|
+
It handles `asyncio.CancelledError` to update task status to cancelled
|
|
255
|
+
and logs other exceptions, marking the task as failed.
|
|
256
|
+
|
|
257
|
+
Args:
|
|
258
|
+
agent_processing_coro (typing.Awaitable[None]): The coroutine that
|
|
259
|
+
performs the agent-specific processing (e.g., streaming results).
|
|
260
|
+
updater (TaskUpdater): The TaskUpdater instance for sending status updates.
|
|
261
|
+
task_id (str): The unique ID of the A2A task.
|
|
262
|
+
context_id (str | None): The context ID. Defaults to None.
|
|
263
|
+
"""
|
|
264
|
+
task = asyncio.create_task(agent_processing_coro)
|
|
265
|
+
self._active_tasks[task_id] = task
|
|
266
|
+
|
|
267
|
+
try:
|
|
268
|
+
await task
|
|
269
|
+
except asyncio.CancelledError:
|
|
270
|
+
# This specific CancelledError is raised if the task created from
|
|
271
|
+
# agent_processing_coro is cancelled externally (e.g., by the cancel method).
|
|
272
|
+
logger.info(f"Agent processing task {task_id} was cancelled by client request.")
|
|
273
|
+
await self._update_status(
|
|
274
|
+
updater,
|
|
275
|
+
TaskState.canceled,
|
|
276
|
+
new_agent_text_message("Task was cancelled by client."),
|
|
277
|
+
params=StatusUpdateParams(task_id=task_id, context_id=context_id),
|
|
278
|
+
)
|
|
279
|
+
raise
|
|
280
|
+
except Exception as e:
|
|
281
|
+
self._remove_active_task(task_id)
|
|
282
|
+
logger.error(f"Error during agent execution for task {task_id}: {e}", exc_info=True)
|
|
283
|
+
await self._update_status(
|
|
284
|
+
updater,
|
|
285
|
+
TaskState.failed,
|
|
286
|
+
new_agent_text_message(f"Error during execution: {str(e)}"),
|
|
287
|
+
params=StatusUpdateParams(final=True, task_id=task_id, context_id=context_id),
|
|
288
|
+
)
|
|
289
|
+
finally:
|
|
290
|
+
self._remove_active_task(task_id)
|
|
291
|
+
|
|
292
|
+
    async def _handle_artifact_event(
        self,
        payload: dict[str, Any],
        updater: TaskUpdater,
        task_id: str | None = None,
    ) -> bool:
        """Handles an artifact event from the agent stream.

        Pipeline: validate the payload -> skip duplicates -> build an
        A2A-compliant artifact -> attach event metadata (with cumulative
        time) -> enqueue it to the client.

        Args:
            payload (dict[str, typing.Any]): The artifact payload containing data, name, etc.
            updater (TaskUpdater): The TaskUpdater instance for sending artifact updates.
            task_id (str | None): The task ID for deduplication tracking.

        Returns:
            bool: False to continue stream, True if there was an error that should stop processing.
        """
        try:
            # Validate and extract artifact data
            artifact_data_b64 = payload.get("data")
            if not artifact_data_b64:
                logger.warning("Artifact payload missing 'data' field")
                return False

            # Check for duplicates (dedup is only possible when we know the task)
            if task_id and self._is_duplicate_artifact(payload, task_id):
                return False

            # Create A2A-compliant artifact
            artifact_to_send = self._create_a2a_artifact(payload)
            if not artifact_to_send:
                return False

            # Merge payload metadata (if any) and apply cumulative time
            event_metadata: dict[str, Any] | None = None
            try:
                raw_md = payload.get("metadata") if isinstance(payload, dict) else None
                # Copy so the time field added below doesn't leak back into
                # the caller's payload dict.
                event_metadata = raw_md.copy() if isinstance(raw_md, dict) else {}
            except Exception as e:
                logger.warning(f"Failed to copy artifact metadata for task {task_id}: {e}")
                event_metadata = {}
            if task_id:
                try:
                    self._apply_cumulative_time(task_id, event_metadata)
                except Exception as e:
                    logger.warning(f"Failed to apply cumulative time to artifact metadata for task {task_id}: {e}")

            # Send artifact to client with metadata
            return await self._send_artifact_to_client(artifact_to_send, updater, metadata=event_metadata)

        except Exception as e:
            # Artifact failures are non-fatal for the stream: log and continue.
            logger.error(f"Failed to process artifact payload: {e}", exc_info=True)
            return False
|
|
344
|
+
|
|
345
|
+
def _is_duplicate_artifact(self, payload: dict[str, Any], task_id: str) -> bool:
|
|
346
|
+
"""Check if artifact is a duplicate and handle deduplication tracking.
|
|
347
|
+
|
|
348
|
+
Args:
|
|
349
|
+
payload: The artifact payload.
|
|
350
|
+
task_id: The task ID for tracking.
|
|
351
|
+
|
|
352
|
+
Returns:
|
|
353
|
+
True if artifact is a duplicate, False otherwise.
|
|
354
|
+
"""
|
|
355
|
+
artifact_hash = ArtifactHandler.generate_artifact_hash(
|
|
356
|
+
payload.get("data", ""),
|
|
357
|
+
payload.get("name", ""),
|
|
358
|
+
payload.get("mime_type", ""),
|
|
359
|
+
)
|
|
360
|
+
|
|
361
|
+
if task_id not in self._processed_artifacts:
|
|
362
|
+
self._processed_artifacts[task_id] = set()
|
|
363
|
+
|
|
364
|
+
if artifact_hash in self._processed_artifacts[task_id]:
|
|
365
|
+
logger.warning(
|
|
366
|
+
f"Skipping duplicate artifact: {payload.get('name', 'unnamed')} (hash: {artifact_hash[:8]}...)"
|
|
367
|
+
)
|
|
368
|
+
return True
|
|
369
|
+
|
|
370
|
+
self._processed_artifacts[task_id].add(artifact_hash)
|
|
371
|
+
logger.info(f"Processing new artifact: {payload.get('name', 'unnamed')} (hash: {artifact_hash[:8]}...)")
|
|
372
|
+
return False
|
|
373
|
+
|
|
374
|
+
def _create_a2a_artifact(self, payload: dict[str, Any]) -> Any | None:
|
|
375
|
+
"""Create A2A-compliant artifact from payload with flexible part type support.
|
|
376
|
+
|
|
377
|
+
Args:
|
|
378
|
+
payload: The artifact payload.
|
|
379
|
+
|
|
380
|
+
Returns:
|
|
381
|
+
A2A artifact object or None if creation failed.
|
|
382
|
+
"""
|
|
383
|
+
try:
|
|
384
|
+
# Determine the appropriate part type based on payload content
|
|
385
|
+
part = self._create_artifact_part(payload)
|
|
386
|
+
if not part:
|
|
387
|
+
return None
|
|
388
|
+
|
|
389
|
+
artifact_to_send = new_artifact(
|
|
390
|
+
parts=[Part(root=part)],
|
|
391
|
+
name=payload.get("name", "Generated Artifact"),
|
|
392
|
+
description=payload.get("description", ""),
|
|
393
|
+
)
|
|
394
|
+
return artifact_to_send
|
|
395
|
+
|
|
396
|
+
except Exception as artifact_error:
|
|
397
|
+
logger.error(f"Failed to create artifact: {artifact_error}")
|
|
398
|
+
return None
|
|
399
|
+
|
|
400
|
+
def _create_artifact_part(self, payload: dict[str, Any]) -> Any | None:
|
|
401
|
+
"""Create the appropriate part type based on payload content.
|
|
402
|
+
|
|
403
|
+
Args:
|
|
404
|
+
payload: The artifact payload.
|
|
405
|
+
|
|
406
|
+
Returns:
|
|
407
|
+
A Part object (TextPart, FilePart, etc.) or None if creation failed.
|
|
408
|
+
"""
|
|
409
|
+
try:
|
|
410
|
+
# Check if this is text content (common for streaming responses)
|
|
411
|
+
if "text" in payload or ("mime_type" in payload and payload["mime_type"].startswith("text/")):
|
|
412
|
+
return TextPart(
|
|
413
|
+
kind="text",
|
|
414
|
+
text=payload.get("text", payload.get("data", "")),
|
|
415
|
+
metadata=payload.get("metadata"),
|
|
416
|
+
)
|
|
417
|
+
|
|
418
|
+
# Check if this has binary data (files, images, etc.)
|
|
419
|
+
elif "data" in payload:
|
|
420
|
+
return FilePart(
|
|
421
|
+
kind="file",
|
|
422
|
+
file=FileWithBytes(
|
|
423
|
+
bytes=payload.get("data"),
|
|
424
|
+
name=payload.get("name", "artifact"),
|
|
425
|
+
mimeType=payload.get("mime_type", "application/octet-stream"),
|
|
426
|
+
),
|
|
427
|
+
metadata=payload.get("metadata"),
|
|
428
|
+
)
|
|
429
|
+
|
|
430
|
+
# Fallback to text part for any other content
|
|
431
|
+
else:
|
|
432
|
+
content = str(payload.get("content", payload.get("data", "")))
|
|
433
|
+
return TextPart(
|
|
434
|
+
kind="text",
|
|
435
|
+
text=content,
|
|
436
|
+
metadata=payload.get("metadata"),
|
|
437
|
+
)
|
|
438
|
+
|
|
439
|
+
except Exception as part_error:
|
|
440
|
+
logger.error(f"Failed to create artifact part: {part_error}")
|
|
441
|
+
return None
|
|
442
|
+
|
|
443
|
+
async def _send_artifact_to_client(
|
|
444
|
+
self,
|
|
445
|
+
artifact: Any,
|
|
446
|
+
updater: TaskUpdater,
|
|
447
|
+
metadata: dict[str, Any] | None = None,
|
|
448
|
+
) -> bool:
|
|
449
|
+
"""Send artifact to client as a TaskArtifactUpdateEvent with event metadata.
|
|
450
|
+
|
|
451
|
+
Args:
|
|
452
|
+
artifact: The A2A artifact to send.
|
|
453
|
+
updater: The TaskUpdater instance used to enqueue artifact events.
|
|
454
|
+
metadata: Optional event-level metadata to include with the update
|
|
455
|
+
(e.g., cumulative time, tracing fields). This is attached to the
|
|
456
|
+
TaskArtifactUpdateEvent so clients receive it on the event envelope.
|
|
457
|
+
|
|
458
|
+
Returns:
|
|
459
|
+
False to continue stream on success, True if there was an error.
|
|
460
|
+
"""
|
|
461
|
+
try:
|
|
462
|
+
event = TaskArtifactUpdateEvent(
|
|
463
|
+
taskId=updater.task_id,
|
|
464
|
+
contextId=updater.context_id,
|
|
465
|
+
artifact=artifact,
|
|
466
|
+
append=None,
|
|
467
|
+
lastChunk=None,
|
|
468
|
+
metadata=metadata,
|
|
469
|
+
)
|
|
470
|
+
await updater.event_queue.enqueue_event(event)
|
|
471
|
+
logger.info(f"Successfully sent artifact '{artifact.name}' to client")
|
|
472
|
+
return False # Continue stream
|
|
473
|
+
|
|
474
|
+
except Exception as send_error:
|
|
475
|
+
logger.error(f"Failed to send artifact to client: {send_error}")
|
|
476
|
+
return True # Error occurred, stop processing
|
|
477
|
+
|
|
478
|
+
async def _send_content_as_artifact( # noqa: PLR0913
|
|
479
|
+
self,
|
|
480
|
+
content: str,
|
|
481
|
+
event_queue: EventQueue,
|
|
482
|
+
task_id: str,
|
|
483
|
+
context_id: str,
|
|
484
|
+
append: bool = True,
|
|
485
|
+
last_chunk: bool = False,
|
|
486
|
+
artifact_name: str = "streaming_response",
|
|
487
|
+
metadata: dict[str, Any] | None = None,
|
|
488
|
+
) -> None:
|
|
489
|
+
"""Send content as a streaming artifact update event.
|
|
490
|
+
|
|
491
|
+
This method creates and sends a TaskArtifactUpdateEvent for content delivery,
|
|
492
|
+
which is the correct way to stream content according to A2A protocol.
|
|
493
|
+
|
|
494
|
+
Args:
|
|
495
|
+
content (str): The content to send as an artifact.
|
|
496
|
+
event_queue (EventQueue): The event queue for sending the artifact event.
|
|
497
|
+
task_id (str): The task ID.
|
|
498
|
+
context_id (str): The context ID.
|
|
499
|
+
append (bool): Whether this content should be appended to previous chunks.
|
|
500
|
+
Defaults to True for streaming content.
|
|
501
|
+
last_chunk (bool): Whether this is the final chunk. Defaults to False.
|
|
502
|
+
artifact_name (str): Name for the artifact. Defaults to "streaming_response".
|
|
503
|
+
metadata (dict[str, Any] | None): Optional metadata to include with the artifact.
|
|
504
|
+
"""
|
|
505
|
+
try:
|
|
506
|
+
# Get or create consistent artifact ID for this streaming task
|
|
507
|
+
if task_id not in self._streaming_artifact_ids:
|
|
508
|
+
# Create new artifact and store its ID
|
|
509
|
+
artifact_payload = new_text_artifact(
|
|
510
|
+
name=artifact_name,
|
|
511
|
+
description="Streaming response from the agent.",
|
|
512
|
+
text=content,
|
|
513
|
+
)
|
|
514
|
+
self._streaming_artifact_ids[task_id] = artifact_payload.artifactId
|
|
515
|
+
else:
|
|
516
|
+
# Reuse existing artifact ID for consistency
|
|
517
|
+
artifact_payload = new_text_artifact(
|
|
518
|
+
name=artifact_name,
|
|
519
|
+
description="Streaming response from the agent.",
|
|
520
|
+
text=content,
|
|
521
|
+
)
|
|
522
|
+
# Override the auto-generated ID with our consistent one
|
|
523
|
+
artifact_payload.artifactId = self._streaming_artifact_ids[task_id]
|
|
524
|
+
|
|
525
|
+
artifact_event = TaskArtifactUpdateEvent(
|
|
526
|
+
append=append,
|
|
527
|
+
contextId=context_id,
|
|
528
|
+
taskId=task_id,
|
|
529
|
+
lastChunk=last_chunk,
|
|
530
|
+
artifact=artifact_payload,
|
|
531
|
+
metadata=metadata,
|
|
532
|
+
)
|
|
533
|
+
|
|
534
|
+
await event_queue.enqueue_event(artifact_event)
|
|
535
|
+
logger.debug(f"Sent content as artifact: {artifact_name} (append={append}, lastChunk={last_chunk})")
|
|
536
|
+
|
|
537
|
+
except Exception as e:
|
|
538
|
+
logger.error(f"Failed to send content as artifact: {e}", exc_info=True)
|
|
539
|
+
|
|
540
|
+
async def _handle_stream_event( # noqa: PLR0913, PLR0911 TODO: Refactor this
|
|
541
|
+
self,
|
|
542
|
+
chunk: A2AEvent,
|
|
543
|
+
updater: TaskUpdater,
|
|
544
|
+
task_id: str,
|
|
545
|
+
context_id: str,
|
|
546
|
+
event_queue: EventQueue,
|
|
547
|
+
metadata: dict[str, Any] | None = None,
|
|
548
|
+
) -> bool:
|
|
549
|
+
"""Handle semantically typed A2A events with type-based dispatching.
|
|
550
|
+
|
|
551
|
+
This method processes A2AEvent objects using their semantic event types,
|
|
552
|
+
eliminating the need for string parsing and JSON decoding. Each event type
|
|
553
|
+
is handled by a dedicated method for better maintainability.
|
|
554
|
+
|
|
555
|
+
Args:
|
|
556
|
+
chunk: The A2AEvent to process with semantic type information.
|
|
557
|
+
updater: TaskUpdater instance for sending A2A status updates.
|
|
558
|
+
task_id: Unique identifier for the A2A task.
|
|
559
|
+
context_id: Context identifier for the A2A session.
|
|
560
|
+
event_queue: Event queue for sending artifact update events.
|
|
561
|
+
metadata: Optional metadata to merge with chunk metadata.
|
|
562
|
+
|
|
563
|
+
Returns:
|
|
564
|
+
bool: True if stream processing should terminate, False to continue.
|
|
565
|
+
"""
|
|
566
|
+
event_type = chunk.get("event_type")
|
|
567
|
+
|
|
568
|
+
# Convert string event type to Enum if possible to match handler_map keys
|
|
569
|
+
if isinstance(event_type, str):
|
|
570
|
+
try:
|
|
571
|
+
event_type = A2AStreamEventType(event_type)
|
|
572
|
+
except ValueError:
|
|
573
|
+
# Keep as string if not a valid enum member (will likely fall through to unknown)
|
|
574
|
+
event_type = chunk.get("event_type")
|
|
575
|
+
|
|
576
|
+
# Prepare metadata and handle artifacts
|
|
577
|
+
final_metadata = self._prepare_event_metadata(chunk, metadata)
|
|
578
|
+
self._apply_cumulative_time(task_id, final_metadata)
|
|
579
|
+
await self._process_event_artifacts(chunk, updater, task_id)
|
|
580
|
+
|
|
581
|
+
# Dispatch to appropriate handler based on event type
|
|
582
|
+
handler_map: dict[Any, Callable[[], Awaitable[bool]]] = {
|
|
583
|
+
A2AStreamEventType.TOOL_CALL: lambda: self._handle_tool_call_event(
|
|
584
|
+
chunk, updater, task_id, context_id, metadata
|
|
585
|
+
),
|
|
586
|
+
A2AStreamEventType.TOOL_RESULT: lambda: self._handle_tool_result_event(
|
|
587
|
+
chunk, updater, task_id, context_id, metadata
|
|
588
|
+
),
|
|
589
|
+
A2AStreamEventType.CONTENT_CHUNK: lambda: self._handle_content_chunk_event(
|
|
590
|
+
chunk, event_queue, task_id, context_id, final_metadata
|
|
591
|
+
),
|
|
592
|
+
A2AStreamEventType.FINAL_RESPONSE: lambda: self._handle_final_response_event(
|
|
593
|
+
chunk, updater, event_queue, task_id, context_id, final_metadata
|
|
594
|
+
),
|
|
595
|
+
A2AStreamEventType.STATUS_UPDATE: lambda: self._handle_status_update_event(
|
|
596
|
+
chunk, updater, task_id, context_id
|
|
597
|
+
),
|
|
598
|
+
A2AStreamEventType.STEP_LIMIT_EXCEEDED: lambda: self._handle_step_limit_exceeded_event(
|
|
599
|
+
chunk, updater, task_id, context_id
|
|
600
|
+
),
|
|
601
|
+
A2AStreamEventType.ERROR: lambda: self._handle_error_event(chunk, updater, task_id, context_id),
|
|
602
|
+
}
|
|
603
|
+
|
|
604
|
+
handler = handler_map.get(event_type)
|
|
605
|
+
if handler:
|
|
606
|
+
return await handler()
|
|
607
|
+
|
|
608
|
+
logger.warning(f"Unknown event type: {event_type}")
|
|
609
|
+
return False
|
|
610
|
+
|
|
611
|
+
def _prepare_event_metadata(self, chunk: A2AEvent, metadata: dict[str, Any] | None) -> dict[str, Any]:
|
|
612
|
+
"""Prepare final metadata by merging chunk and provided metadata.
|
|
613
|
+
|
|
614
|
+
Args:
|
|
615
|
+
chunk: A2AEvent containing chunk metadata.
|
|
616
|
+
metadata: Optional additional metadata to merge.
|
|
617
|
+
|
|
618
|
+
Returns:
|
|
619
|
+
dict[str, Any]: Merged metadata dictionary.
|
|
620
|
+
"""
|
|
621
|
+
final_metadata = {}
|
|
622
|
+
if metadata:
|
|
623
|
+
final_metadata.update(metadata)
|
|
624
|
+
|
|
625
|
+
chunk_metadata = chunk.get("metadata", {})
|
|
626
|
+
if chunk_metadata:
|
|
627
|
+
final_metadata.update(chunk_metadata)
|
|
628
|
+
|
|
629
|
+
if chunk.get(MetadataFieldKeys.REFERENCES):
|
|
630
|
+
final_metadata[MetadataFieldKeys.REFERENCES] = serialize_references_for_metadata(
|
|
631
|
+
chunk[MetadataFieldKeys.REFERENCES]
|
|
632
|
+
)
|
|
633
|
+
|
|
634
|
+
event_type_value = chunk.get("event_type")
|
|
635
|
+
if isinstance(event_type_value, A2AStreamEventType):
|
|
636
|
+
final_metadata["event_type"] = event_type_value.value
|
|
637
|
+
elif isinstance(event_type_value, str):
|
|
638
|
+
final_metadata["event_type"] = event_type_value
|
|
639
|
+
|
|
640
|
+
# Merge selected top-level fields from chunk into metadata using string keys
|
|
641
|
+
metadata_fields = [
|
|
642
|
+
MetadataFieldKeys.TOOL_INFO,
|
|
643
|
+
MetadataFieldKeys.STEP_USAGE,
|
|
644
|
+
MetadataFieldKeys.TOTAL_USAGE,
|
|
645
|
+
MetadataFieldKeys.THINKING_AND_ACTIVITY_INFO,
|
|
646
|
+
]
|
|
647
|
+
|
|
648
|
+
for key in metadata_fields:
|
|
649
|
+
value = chunk.get(key)
|
|
650
|
+
if value is not None and value: # Only overwrite if value is truthy
|
|
651
|
+
final_metadata[key] = value
|
|
652
|
+
|
|
653
|
+
return final_metadata
|
|
654
|
+
|
|
655
|
+
async def _process_event_artifacts(self, chunk: A2AEvent, updater: TaskUpdater, task_id: str) -> None:
|
|
656
|
+
"""Process any artifacts attached to the event.
|
|
657
|
+
|
|
658
|
+
Args:
|
|
659
|
+
chunk: A2AEvent that may contain artifacts.
|
|
660
|
+
updater: TaskUpdater for handling artifact events.
|
|
661
|
+
task_id: Task identifier for artifact processing.
|
|
662
|
+
"""
|
|
663
|
+
if "artifacts" in chunk and chunk["artifacts"]:
|
|
664
|
+
for artifact_data in chunk["artifacts"]:
|
|
665
|
+
await self._handle_artifact_event(artifact_data, updater, task_id)
|
|
666
|
+
|
|
667
|
+
async def _handle_tool_call_event(
|
|
668
|
+
self,
|
|
669
|
+
chunk: A2AEvent,
|
|
670
|
+
updater: TaskUpdater,
|
|
671
|
+
task_id: str,
|
|
672
|
+
context_id: str,
|
|
673
|
+
metadata: dict[str, Any] | None = None,
|
|
674
|
+
) -> bool:
|
|
675
|
+
"""Handle TOOL_CALL event by sending appropriate status update.
|
|
676
|
+
|
|
677
|
+
Args:
|
|
678
|
+
chunk: A2AEvent with TOOL_CALL type and tool information.
|
|
679
|
+
updater: TaskUpdater for sending status updates.
|
|
680
|
+
task_id: Task identifier.
|
|
681
|
+
context_id: Context identifier.
|
|
682
|
+
metadata: Optional metadata to include with the status update.
|
|
683
|
+
|
|
684
|
+
Returns:
|
|
685
|
+
bool: False to continue stream processing.
|
|
686
|
+
"""
|
|
687
|
+
status_message = chunk["content"]
|
|
688
|
+
final_metadata = self._prepare_event_metadata(chunk, metadata)
|
|
689
|
+
message = Message(
|
|
690
|
+
role=Role.agent,
|
|
691
|
+
parts=[Part(root=TextPart(text=status_message))],
|
|
692
|
+
messageId=str(uuid.uuid4()),
|
|
693
|
+
taskId=task_id,
|
|
694
|
+
contextId=context_id,
|
|
695
|
+
)
|
|
696
|
+
|
|
697
|
+
await self._update_status(
|
|
698
|
+
updater,
|
|
699
|
+
TaskState.working,
|
|
700
|
+
message,
|
|
701
|
+
StatusUpdateParams(metadata=final_metadata, task_id=task_id, context_id=context_id),
|
|
702
|
+
)
|
|
703
|
+
return False
|
|
704
|
+
|
|
705
|
+
async def _handle_tool_result_event(
|
|
706
|
+
self,
|
|
707
|
+
chunk: A2AEvent,
|
|
708
|
+
updater: TaskUpdater,
|
|
709
|
+
task_id: str,
|
|
710
|
+
context_id: str,
|
|
711
|
+
metadata: dict[str, Any] | None = None,
|
|
712
|
+
) -> bool:
|
|
713
|
+
"""Handle TOOL_RESULT event by sending completion status update.
|
|
714
|
+
|
|
715
|
+
Args:
|
|
716
|
+
chunk: A2AEvent with TOOL_RESULT type and execution details.
|
|
717
|
+
updater: TaskUpdater for sending status updates.
|
|
718
|
+
task_id: Task identifier.
|
|
719
|
+
context_id: Context identifier.
|
|
720
|
+
metadata: Optional metadata to include with the status update.
|
|
721
|
+
|
|
722
|
+
Returns:
|
|
723
|
+
bool: False to continue stream processing.
|
|
724
|
+
"""
|
|
725
|
+
status_message = self._extract_tool_result_status_message(chunk)
|
|
726
|
+
final_metadata = self._prepare_event_metadata(chunk, metadata)
|
|
727
|
+
message = Message(
|
|
728
|
+
role=Role.agent,
|
|
729
|
+
parts=[Part(root=TextPart(text=status_message))],
|
|
730
|
+
messageId=str(uuid.uuid4()),
|
|
731
|
+
taskId=task_id,
|
|
732
|
+
contextId=context_id,
|
|
733
|
+
)
|
|
734
|
+
|
|
735
|
+
await self._update_status(
|
|
736
|
+
updater,
|
|
737
|
+
TaskState.working,
|
|
738
|
+
message,
|
|
739
|
+
StatusUpdateParams(metadata=final_metadata, task_id=task_id, context_id=context_id),
|
|
740
|
+
)
|
|
741
|
+
return False
|
|
742
|
+
|
|
743
|
+
async def _handle_content_chunk_event(
|
|
744
|
+
self, chunk: A2AEvent, event_queue: EventQueue, task_id: str, context_id: str, final_metadata: dict[str, Any]
|
|
745
|
+
) -> bool:
|
|
746
|
+
"""Handle CONTENT_CHUNK event by streaming content as artifact.
|
|
747
|
+
|
|
748
|
+
Args:
|
|
749
|
+
chunk: A2AEvent with CONTENT_CHUNK type and user content.
|
|
750
|
+
event_queue: Event queue for artifact updates.
|
|
751
|
+
task_id: Task identifier.
|
|
752
|
+
context_id: Context identifier.
|
|
753
|
+
final_metadata: Merged metadata for the artifact.
|
|
754
|
+
|
|
755
|
+
Returns:
|
|
756
|
+
bool: False to continue stream processing.
|
|
757
|
+
"""
|
|
758
|
+
is_first_chunk = task_id not in self._streaming_artifacts
|
|
759
|
+
self._apply_cumulative_time(task_id, final_metadata)
|
|
760
|
+
await self._send_content_as_artifact(
|
|
761
|
+
content=chunk["content"],
|
|
762
|
+
event_queue=event_queue,
|
|
763
|
+
task_id=task_id,
|
|
764
|
+
context_id=context_id,
|
|
765
|
+
append=not is_first_chunk,
|
|
766
|
+
last_chunk=False,
|
|
767
|
+
metadata=final_metadata,
|
|
768
|
+
)
|
|
769
|
+
self._streaming_artifacts[task_id] = True
|
|
770
|
+
return False
|
|
771
|
+
|
|
772
|
+
async def _handle_final_response_event(  # noqa: PLR0913
    self,
    chunk: A2AEvent,
    updater: TaskUpdater,
    event_queue: EventQueue,
    task_id: str,
    context_id: str,
    final_metadata: dict[str, Any],
) -> bool:
    """Emit the final artifact (if any content) and mark the task completed.

    When earlier CONTENT_CHUNK events streamed partial content, the final
    content is appended to the existing ``streaming_response`` artifact;
    otherwise a standalone ``final_response`` artifact is created.

    Args:
        chunk: A2AEvent with FINAL_RESPONSE type and final content.
        updater: TaskUpdater for task completion.
        event_queue: Event queue for artifact updates.
        task_id: Task identifier.
        context_id: Context identifier.
        final_metadata: Merged metadata for the artifact.

    Returns:
        bool: True to terminate stream processing.
    """
    final_content = chunk["content"]
    streamed_before = task_id in self._streaming_artifacts

    if final_content is not None:
        self._apply_cumulative_time(task_id, final_metadata)
        # Continue the streaming artifact when chunks were already sent.
        chosen_name = "streaming_response" if streamed_before else "final_response"
        await self._send_content_as_artifact(
            content=final_content,
            event_queue=event_queue,
            task_id=task_id,
            context_id=context_id,
            append=streamed_before,
            last_chunk=True,
            artifact_name=chosen_name,
            metadata=final_metadata,
        )

    # Complete the task via status enqueue to preserve metadata; tool and
    # thinking details are dropped from the terminal status payload.
    completion_metadata = final_metadata.copy()
    for transient_key in (MetadataFieldKeys.TOOL_INFO, MetadataFieldKeys.THINKING_AND_ACTIVITY_INFO):
        completion_metadata.pop(transient_key, None)
    completion_message = "Task completed successfully."
    await self._update_status(
        updater,
        TaskState.completed,
        new_agent_text_message(completion_message, context_id=context_id, task_id=task_id),
        params=StatusUpdateParams(metadata=completion_metadata, final=True, task_id=task_id, context_id=context_id),
    )
    return True
|
|
823
|
+
|
|
824
|
+
async def _handle_status_update_event(
    self, chunk: A2AEvent, updater: TaskUpdater, task_id: str, context_id: str
) -> bool:
    """Forward a STATUS_UPDATE event as a working-state status message.

    Args:
        chunk: A2AEvent with STATUS_UPDATE type.
        updater: TaskUpdater for sending status updates.
        task_id: Task identifier.
        context_id: Context identifier.

    Returns:
        bool: False to continue stream processing.
    """
    # Attach event metadata so clients can trace step_ids on plain
    # status updates, not only on artifacts.
    status_metadata = self._prepare_event_metadata(chunk, None)
    status_parts = [Part(root=TextPart(text=chunk["content"]))]
    status_message = Message(
        role=Role.agent,
        parts=status_parts,
        messageId=str(uuid.uuid4()),
        taskId=task_id,
        contextId=context_id,
    )
    await self._update_status(
        updater,
        TaskState.working,
        status_message,
        StatusUpdateParams(metadata=status_metadata, task_id=task_id, context_id=context_id),
    )
    return False
|
|
854
|
+
|
|
855
|
+
async def _handle_error_event(self, chunk: A2AEvent, updater: TaskUpdater, task_id: str, context_id: str) -> bool:
    """Handle ERROR event by failing the task.

    Args:
        chunk: A2AEvent with ERROR type and error details.
        updater: TaskUpdater for task failure.
        task_id: Task identifier.
        context_id: Context identifier.

    Returns:
        bool: True to terminate stream processing.
    """
    await self._update_status(
        updater,
        TaskState.failed,
        new_agent_text_message(chunk["content"], context_id=context_id, task_id=task_id),
        # Include task_id/context_id on the params, consistent with the other
        # terminal handlers (e.g. _handle_step_limit_exceeded_event), so the
        # final failed status is attributable to the task and context.
        params=StatusUpdateParams(final=True, task_id=task_id, context_id=context_id),
    )
    return True
|
|
874
|
+
|
|
875
|
+
async def _handle_step_limit_exceeded_event(
    self, chunk: A2AEvent, updater: TaskUpdater, task_id: str, context_id: str
) -> bool:
    """Fail the task when the agent exceeded its configured step limit.

    Args:
        chunk: A2AEvent payload describing the step limit exceed event,
            including content and optional metadata.
        updater: TaskUpdater used to emit status updates to the A2A server.
        task_id: Identifier of the task whose step limit was exceeded.
        context_id: Context identifier associated with the task.

    Returns:
        bool: True to terminate further stream processing for the task.
    """
    failure_metadata = self._prepare_event_metadata(chunk, None)
    # Fall back to a generic explanation when the event carried no content.
    failure_text = chunk.get("content") or "Agent exceeded the configured step limit."

    await self._update_status(
        updater,
        TaskState.failed,
        new_agent_text_message(failure_text, context_id=context_id, task_id=task_id),
        params=StatusUpdateParams(final=True, metadata=failure_metadata, task_id=task_id, context_id=context_id),
    )
    return True
|
|
900
|
+
|
|
901
|
+
def _extract_tool_result_status_message(self, chunk: A2AEvent) -> str:
    """Derive a human-readable status message from a TOOL_RESULT event.

    Preference order: the event's own non-blank string content, then a
    "Completed <tool>" message built from the tool metadata, then a
    generic completion message.

    Args:
        chunk: A2AEvent with TOOL_RESULT type and execution details.

    Returns:
        str: Human-readable status message for tool completion.
    """
    raw_content = chunk.get("content")
    if isinstance(raw_content, str) and raw_content.strip():
        return raw_content

    tool_details = chunk.get(MetadataFieldKeys.TOOL_INFO)
    tool_name = tool_details.get("name") if tool_details else None
    if tool_name:
        return f"Completed {tool_name}"
    # Fall back to the generic completion message used when tasks end silently.
    return "Task completed successfully."
|
|
919
|
+
|
|
920
|
+
@abstractmethod
async def execute(
    self,
    context: RequestContext,
    event_queue: EventQueue,
) -> None:
    """Processes an incoming agent request and manages its execution.

    Concrete subclasses drive their underlying agent (for example a
    LangGraph or Google ADK agent) from the data in `context`, and report
    every task status change, artifact, and completion through
    `event_queue`.

    A typical implementation:
    1. Calls `_handle_initial_execute_checks` for validation and setup.
    2. Defines an agent-specific coroutine for processing (e.g., `_process_stream`).
    3. Calls `_execute_agent_processing` to manage that coroutine's lifecycle.

    Args:
        context (RequestContext): The request context containing information about the incoming
            message, task, and other relevant data.
        event_queue (EventQueue): The queue used to send events (e.g., task status updates,
            artifacts) back to the A2A server infrastructure.
    """
    raise NotImplementedError("Concrete A2A executors must implement the 'execute' method.")
|
|
945
|
+
|
|
946
|
+
async def cancel(self, context: RequestContext, event_queue: EventQueue) -> None:
    """Handles a request to cancel an ongoing agent task.

    If an active asyncio.Task exists for `context.task_id`, it is
    cancelled and given a short grace period to clean up (delegated to
    `_request_task_cancellation`). The outcome of the attempt is reported
    through `event_queue` via `_emit_cancellation_status`.

    Args:
        context (RequestContext): The request context for the task to be cancelled,
            primarily used to get the `task_id` and `context_id`.
        event_queue (EventQueue): The queue for sending cancellation status events.
    """
    task_id = context.task_id
    active_task = self._active_tasks.get(task_id)
    updater = TaskUpdater(event_queue, task_id, context.context_id)

    client_cancelled = False
    pending_error: asyncio.CancelledError | None = None
    if active_task is not None and not active_task.done():
        logger.info(f"Attempting to cancel task {task_id} due to client request.")
        active_task.cancel()
        client_cancelled = True
        pending_error, already_handled = await self._request_task_cancellation(active_task, task_id, context, updater)
        # The helper already emitted status and cleaned up; nothing left to do.
        if already_handled:
            return

    self._remove_active_task(task_id)
    await self._emit_cancellation_status(context, updater, active_task, client_cancelled)

    # Re-raise a CancelledError surfaced while waiting so callers observe it.
    if pending_error is not None:
        raise pending_error
|
|
979
|
+
|
|
980
|
+
async def _request_task_cancellation(
    self,
    task: asyncio.Task,
    task_id: str,
    context: RequestContext,
    updater: TaskUpdater,
) -> tuple[asyncio.CancelledError | None, bool]:
    """Wait for a cancelled task to finish any cleanup.

    Waits up to 5 seconds for the (already cancelled) task to finish its
    cancellation logic, mapping the outcome to a (error, handled) pair.

    Args:
        task: The asyncio.Task that was cancelled.
        task_id: The ID of the task being cancelled.
        context: The RequestContext for the cancellation request.
        updater: The TaskUpdater for sending status updates.

    Returns:
        A tuple containing:
        - The asyncio.CancelledError if the task surfaced one while waiting.
        - A boolean indicating whether the caller should stop further processing
          because the helper already handled status updates and cleanup.
    """
    try:
        # shield() keeps the 5s wait_for timeout from cancelling `task`
        # a second time: on timeout only the shield wrapper is cancelled
        # and the task continues its cleanup in the background.
        await asyncio.wait_for(asyncio.shield(task), timeout=5.0)
        logger.info(f"Task {task_id} completed its execution or cleanup after cancellation request.")
        return None, False
    except asyncio.CancelledError as exc:
        # The task's cancellation propagated out of the await; hand the
        # error back so the caller can re-raise after emitting status.
        logger.info(f"Task {task_id} was externally cancelled and finished its cancellation logic or timed out.")
        return exc, False
    except TimeoutError:
        # NOTE(review): on Python < 3.11, wait_for raises asyncio.TimeoutError,
        # which is not the builtin TimeoutError and would fall through to the
        # generic Exception branch below — confirm the runtime is 3.11+.
        logger.warning(
            f"Timeout waiting for task {task_id} to complete after cancellation request. "
            "It might still be running cleanup in the background or may not have handled cancellation properly."
        )
        return None, False
    except Exception as error:  # noqa: BLE001
        # Cleanup itself failed: report a canceled-with-error status here and
        # tell the caller (handled=True) that no further status is needed.
        logger.error(
            f"Error encountered while waiting for cancelled task {task_id} to finish: {error}",
            exc_info=True,
        )
        await self._update_status(
            updater,
            TaskState.canceled,
            message=new_agent_text_message(
                f"Task cancelled, but an error occurred during its cleanup: {str(error)}"
            ),
            params=StatusUpdateParams(task_id=task_id, context_id=context.context_id),
        )
        self._remove_active_task(task_id)
        return None, True
|
|
1029
|
+
|
|
1030
|
+
async def _emit_cancellation_status(
    self,
    context: RequestContext,
    updater: TaskUpdater,
    task: asyncio.Task | None,
    cancelled_by_client: bool,
) -> None:
    """Emit the final cancellation status based on task state.

    Exactly one canceled-state status update is emitted; the branches are
    checked in priority order: client-requested cancel, already-cancelled
    task, already-finished task, then a generic fallback.

    Args:
        context: The RequestContext for the cancellation request.
        updater: The TaskUpdater for sending status updates.
        task: The asyncio.Task that was cancelled, or None.
        cancelled_by_client: Whether the cancellation was requested by the client.
    """
    # Client-initiated cancellation takes precedence over task state.
    if cancelled_by_client:
        await self._update_status(
            updater,
            TaskState.canceled,
            message=new_agent_text_message("Task cancelled successfully by client request."),
            params=StatusUpdateParams(task_id=context.task_id, context_id=context.context_id),
        )
        return

    # The asyncio task was already cancelled before this request arrived.
    if task and task.cancelled():
        await self._update_status(
            updater,
            TaskState.canceled,
            message=new_agent_text_message("Task was found to be already cancelled."),
            params=StatusUpdateParams(task_id=context.task_id, context_id=context.context_id),
        )
        return

    # The asyncio task finished (completed or failed) on its own; only emit a
    # canceled status if the A2A-level task is not already in a terminal state.
    if task and task.done() and not task.cancelled():
        logger.info(f"Task {context.task_id} was already done (completed/failed) when cancel was processed.")
        current_task = context.current_task
        # NOTE(review): current_task.status may be a status object rather than a
        # bare TaskState — confirm this membership test compares the intended
        # value (e.g. whether it should read current_task.status.state).
        if current_task and current_task.status not in (
            TaskState.completed,
            TaskState.failed,
            TaskState.canceled,
        ):
            await self._update_status(
                updater,
                TaskState.canceled,
                message=new_agent_text_message("Task was already done but marked as cancelled per request."),
                params=StatusUpdateParams(task_id=context.task_id, context_id=context.context_id),
            )
        return

    # Fallback: no active task was found (or it was handled elsewhere).
    await self._update_status(
        updater,
        TaskState.canceled,
        message=new_agent_text_message(
            "Task cancellation processed; task was not actively running or already handled."
        ),
        params=StatusUpdateParams(task_id=context.task_id, context_id=context.context_id),
    )
|