solace-agent-mesh 0.2.4__py3-none-any.whl → 1.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of solace-agent-mesh might be problematic. Click here for more details.
- solace_agent_mesh/agent/adk/adk_llm.txt +93 -0
- solace_agent_mesh/agent/adk/app_llm_agent.py +26 -0
- solace_agent_mesh/agent/adk/callbacks.py +1694 -0
- solace_agent_mesh/agent/adk/filesystem_artifact_service.py +381 -0
- solace_agent_mesh/agent/adk/invocation_monitor.py +295 -0
- solace_agent_mesh/agent/adk/models/lite_llm.py +872 -0
- solace_agent_mesh/agent/adk/models/models_llm.txt +94 -0
- solace_agent_mesh/agent/adk/runner.py +353 -0
- solace_agent_mesh/agent/adk/services.py +240 -0
- solace_agent_mesh/agent/adk/setup.py +751 -0
- solace_agent_mesh/agent/adk/stream_parser.py +214 -0
- solace_agent_mesh/agent/adk/tool_wrapper.py +139 -0
- solace_agent_mesh/agent/agent_llm.txt +41 -0
- solace_agent_mesh/agent/protocol/event_handlers.py +1469 -0
- solace_agent_mesh/agent/protocol/protocol_llm.txt +21 -0
- solace_agent_mesh/agent/sac/app.py +640 -0
- solace_agent_mesh/agent/sac/component.py +3388 -0
- solace_agent_mesh/agent/sac/patch_adk.py +111 -0
- solace_agent_mesh/agent/sac/sac_llm.txt +105 -0
- solace_agent_mesh/agent/sac/task_execution_context.py +176 -0
- solace_agent_mesh/agent/testing/__init__.py +3 -0
- solace_agent_mesh/agent/testing/debug_utils.py +135 -0
- solace_agent_mesh/agent/testing/testing_llm.txt +90 -0
- solace_agent_mesh/agent/tools/__init__.py +14 -0
- solace_agent_mesh/agent/tools/audio_tools.py +1622 -0
- solace_agent_mesh/agent/tools/builtin_artifact_tools.py +1954 -0
- solace_agent_mesh/agent/tools/builtin_data_analysis_tools.py +238 -0
- solace_agent_mesh/agent/tools/general_agent_tools.py +569 -0
- solace_agent_mesh/agent/tools/image_tools.py +1184 -0
- solace_agent_mesh/agent/tools/peer_agent_tool.py +289 -0
- solace_agent_mesh/agent/tools/registry.py +36 -0
- solace_agent_mesh/agent/tools/test_tools.py +135 -0
- solace_agent_mesh/agent/tools/tool_definition.py +45 -0
- solace_agent_mesh/agent/tools/tools_llm.txt +104 -0
- solace_agent_mesh/agent/tools/web_tools.py +381 -0
- solace_agent_mesh/agent/utils/artifact_helpers.py +927 -0
- solace_agent_mesh/agent/utils/config_parser.py +47 -0
- solace_agent_mesh/agent/utils/context_helpers.py +60 -0
- solace_agent_mesh/agent/utils/utils_llm.txt +153 -0
- solace_agent_mesh/assets/docs/404.html +16 -0
- solace_agent_mesh/assets/docs/assets/css/styles.906a1503.css +1 -0
- solace_agent_mesh/assets/docs/assets/images/Solace_AI_Framework_With_Broker-85f0a306a9bcdd20b390b7a949f6d862.png +0 -0
- solace_agent_mesh/assets/docs/assets/images/sac-flows-80d5b603c6aafd33e87945680ce0abf3.png +0 -0
- solace_agent_mesh/assets/docs/assets/images/sac_parts_of_a_component-cb3d0424b1d0c17734c5435cca6b4082.png +0 -0
- solace_agent_mesh/assets/docs/assets/js/04989206.674a8007.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/0e682baa.79f0ab22.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/1001.0182a8bd.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/1023fc19.015679ca.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/1039.0bd46aa1.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/149.b797a808.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/1523c6b4.91c7bc01.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/165.6a39807d.js +2 -0
- solace_agent_mesh/assets/docs/assets/js/165.6a39807d.js.LICENSE.txt +9 -0
- solace_agent_mesh/assets/docs/assets/js/166ab619.7d97ccaf.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/17896441.a5e82f9b.js +2 -0
- solace_agent_mesh/assets/docs/assets/js/17896441.a5e82f9b.js.LICENSE.txt +7 -0
- solace_agent_mesh/assets/docs/assets/js/1c6e87d2.23bccffb.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/2130.ab9fd314.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/21ceee5f.614fa8dd.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/2237.5e477fc6.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/2334.622a6395.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/2a9cab12.8909df92.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/3219.adc1d663.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/332e10b5.7a103f42.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/3624.b524e433.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/375.708d48db.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/3834.b6cd790e.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/3d406171.f722eaf5.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/4250.95455b28.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/42b3f8d8.36090198.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/4356.d169ab5b.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/442a8107.5ba94b65.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/4458.518e66fa.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/4488.c7cc3442.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/4494.6ee23046.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/4855.fc4444b6.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/4866.22daefc0.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/4950.ca4caeda.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/4c2787c2.66ee00e9.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/5388.7a136447.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/55f47984.c484bf96.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/5607.081356f8.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/5864.b0d0e9de.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/5b4258a4.bda20761.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/5e95c892.558d5167.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/6143.0a1464c9.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/6395.e9c73649.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/6796.51d2c9b7.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/6976.379be23b.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/6978.ee0b945c.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/7040.cb436723.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/7195.412f418a.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/7280.3fb73bdb.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/768e31b0.a12673db.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/7845.e33e7c4c.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/7900.69516146.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/8356.8a379c04.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/85387663.6bf41934.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/8567.4732c6b7.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/8573.cb04eda5.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/8577.1d54e766.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/8591.d7c16be6.js +2 -0
- solace_agent_mesh/assets/docs/assets/js/8591.d7c16be6.js.LICENSE.txt +61 -0
- solace_agent_mesh/assets/docs/assets/js/8709.7ecd4047.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/8731.49e930c2.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/8908.f9d1b506.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/9157.b4093d07.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/9278.a4fd875d.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/945fb41e.74d728aa.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/9616.b75c2f6d.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/9793.c6d16376.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/9eff14a2.1bf8f61c.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/a3a92b25.26ca071f.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/a7bd4aaa.2204d2f7.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/a94703ab.0438dbc2.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/aba21aa0.c42a534c.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/aba87c2f.d3e2dcc3.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/ae4415af.8e279b5d.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/b7006a3a.40b10c9d.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/bac0be12.f50d9bac.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/bb2ef573.207e6990.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/c2c06897.63b76e9e.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/cc969b05.954186d4.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/cd3d4052.ca6eed8c.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/ced92a13.fb92e7ca.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/cee5d587.f5b73ca1.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/f284c35a.ecc3d195.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/f897a61a.2c2e152c.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/fbfa3e75.aca209c9.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/main.7ed3319f.js +2 -0
- solace_agent_mesh/assets/docs/assets/js/main.7ed3319f.js.LICENSE.txt +81 -0
- solace_agent_mesh/assets/docs/assets/js/runtime~main.d9520ae2.js +1 -0
- solace_agent_mesh/assets/docs/docs/documentation/concepts/agents/index.html +128 -0
- solace_agent_mesh/assets/docs/docs/documentation/concepts/architecture/index.html +91 -0
- solace_agent_mesh/assets/docs/docs/documentation/concepts/cli/index.html +201 -0
- solace_agent_mesh/assets/docs/docs/documentation/concepts/gateways/index.html +91 -0
- solace_agent_mesh/assets/docs/docs/documentation/concepts/orchestrator/index.html +55 -0
- solace_agent_mesh/assets/docs/docs/documentation/concepts/plugins/index.html +82 -0
- solace_agent_mesh/assets/docs/docs/documentation/deployment/debugging/index.html +60 -0
- solace_agent_mesh/assets/docs/docs/documentation/deployment/deploy/index.html +48 -0
- solace_agent_mesh/assets/docs/docs/documentation/deployment/observability/index.html +54 -0
- solace_agent_mesh/assets/docs/docs/documentation/enterprise/index.html +17 -0
- solace_agent_mesh/assets/docs/docs/documentation/getting-started/component-overview/index.html +45 -0
- solace_agent_mesh/assets/docs/docs/documentation/getting-started/installation/index.html +76 -0
- solace_agent_mesh/assets/docs/docs/documentation/getting-started/introduction/index.html +150 -0
- solace_agent_mesh/assets/docs/docs/documentation/getting-started/quick-start/index.html +54 -0
- solace_agent_mesh/assets/docs/docs/documentation/tutorials/bedrock-agents/index.html +267 -0
- solace_agent_mesh/assets/docs/docs/documentation/tutorials/custom-agent/index.html +136 -0
- solace_agent_mesh/assets/docs/docs/documentation/tutorials/event-mesh-gateway/index.html +116 -0
- solace_agent_mesh/assets/docs/docs/documentation/tutorials/mcp-integration/index.html +80 -0
- solace_agent_mesh/assets/docs/docs/documentation/tutorials/mongodb-integration/index.html +164 -0
- solace_agent_mesh/assets/docs/docs/documentation/tutorials/rest-gateway/index.html +57 -0
- solace_agent_mesh/assets/docs/docs/documentation/tutorials/slack-integration/index.html +72 -0
- solace_agent_mesh/assets/docs/docs/documentation/tutorials/sql-database/index.html +102 -0
- solace_agent_mesh/assets/docs/docs/documentation/user-guide/builtin-tools/artifact-management/index.html +99 -0
- solace_agent_mesh/assets/docs/docs/documentation/user-guide/builtin-tools/audio-tools/index.html +90 -0
- solace_agent_mesh/assets/docs/docs/documentation/user-guide/builtin-tools/data-analysis-tools/index.html +107 -0
- solace_agent_mesh/assets/docs/docs/documentation/user-guide/builtin-tools/embeds/index.html +152 -0
- solace_agent_mesh/assets/docs/docs/documentation/user-guide/builtin-tools/index.html +103 -0
- solace_agent_mesh/assets/docs/docs/documentation/user-guide/create-agents/index.html +170 -0
- solace_agent_mesh/assets/docs/docs/documentation/user-guide/create-gateways/index.html +200 -0
- solace_agent_mesh/assets/docs/docs/documentation/user-guide/creating-service-providers/index.html +54 -0
- solace_agent_mesh/assets/docs/docs/documentation/user-guide/solace-ai-connector/index.html +69 -0
- solace_agent_mesh/assets/docs/docs/documentation/user-guide/structure/index.html +59 -0
- solace_agent_mesh/assets/docs/img/Solace_AI_Framework_README.png +0 -0
- solace_agent_mesh/assets/docs/img/Solace_AI_Framework_With_Broker.png +0 -0
- solace_agent_mesh/assets/docs/img/logo.png +0 -0
- solace_agent_mesh/assets/docs/img/sac-flows.png +0 -0
- solace_agent_mesh/assets/docs/img/sac_parts_of_a_component.png +0 -0
- solace_agent_mesh/assets/docs/img/solace-logo.png +0 -0
- solace_agent_mesh/assets/docs/lunr-index-1753813536522.json +1 -0
- solace_agent_mesh/assets/docs/lunr-index.json +1 -0
- solace_agent_mesh/assets/docs/search-doc-1753813536522.json +1 -0
- solace_agent_mesh/assets/docs/search-doc.json +1 -0
- solace_agent_mesh/assets/docs/sitemap.xml +1 -0
- solace_agent_mesh/cli/__init__.py +1 -1
- solace_agent_mesh/cli/commands/add_cmd/__init__.py +15 -0
- solace_agent_mesh/cli/commands/add_cmd/add_cmd_llm.txt +250 -0
- solace_agent_mesh/cli/commands/add_cmd/agent_cmd.py +659 -0
- solace_agent_mesh/cli/commands/add_cmd/gateway_cmd.py +322 -0
- solace_agent_mesh/cli/commands/add_cmd/web_add_agent_step.py +93 -0
- solace_agent_mesh/cli/commands/add_cmd/web_add_gateway_step.py +118 -0
- solace_agent_mesh/cli/commands/docs_cmd.py +57 -0
- solace_agent_mesh/cli/commands/eval_cmd.py +64 -0
- solace_agent_mesh/cli/commands/init_cmd/__init__.py +404 -0
- solace_agent_mesh/cli/commands/init_cmd/broker_step.py +201 -0
- solace_agent_mesh/cli/commands/init_cmd/directory_step.py +28 -0
- solace_agent_mesh/cli/commands/init_cmd/env_step.py +197 -0
- solace_agent_mesh/cli/commands/init_cmd/init_cmd_llm.txt +365 -0
- solace_agent_mesh/cli/commands/init_cmd/orchestrator_step.py +387 -0
- solace_agent_mesh/cli/commands/init_cmd/project_files_step.py +38 -0
- solace_agent_mesh/cli/commands/init_cmd/web_init_step.py +110 -0
- solace_agent_mesh/cli/commands/init_cmd/webui_gateway_step.py +183 -0
- solace_agent_mesh/cli/commands/plugin_cmd/__init__.py +18 -0
- solace_agent_mesh/cli/commands/plugin_cmd/add_cmd.py +372 -0
- solace_agent_mesh/cli/commands/plugin_cmd/build_cmd.py +86 -0
- solace_agent_mesh/cli/commands/plugin_cmd/catalog_cmd.py +138 -0
- solace_agent_mesh/cli/commands/plugin_cmd/create_cmd.py +309 -0
- solace_agent_mesh/cli/commands/plugin_cmd/official_registry.py +174 -0
- solace_agent_mesh/cli/commands/plugin_cmd/plugin_cmd_llm.txt +305 -0
- solace_agent_mesh/cli/commands/run_cmd.py +158 -0
- solace_agent_mesh/cli/main.py +17 -294
- solace_agent_mesh/cli/utils.py +135 -204
- solace_agent_mesh/client/webui/frontend/static/assets/authCallback-DvlO62me.js +1 -0
- solace_agent_mesh/client/webui/frontend/static/assets/client-bp6u3qVZ.js +49 -0
- solace_agent_mesh/client/webui/frontend/static/assets/favicon-BLgzUch9.ico +0 -0
- solace_agent_mesh/client/webui/frontend/static/assets/main-An0a5j5k.js +663 -0
- solace_agent_mesh/client/webui/frontend/static/assets/main-Bu5-4Bac.css +1 -0
- solace_agent_mesh/client/webui/frontend/static/auth-callback.html +14 -0
- solace_agent_mesh/client/webui/frontend/static/index.html +15 -0
- solace_agent_mesh/common/__init__.py +1 -0
- solace_agent_mesh/common/a2a_protocol.py +564 -0
- solace_agent_mesh/common/agent_registry.py +42 -0
- solace_agent_mesh/common/client/__init__.py +4 -0
- solace_agent_mesh/common/client/card_resolver.py +21 -0
- solace_agent_mesh/common/client/client.py +85 -0
- solace_agent_mesh/common/client/client_llm.txt +133 -0
- solace_agent_mesh/common/common_llm.txt +144 -0
- solace_agent_mesh/common/constants.py +1 -14
- solace_agent_mesh/common/middleware/__init__.py +12 -0
- solace_agent_mesh/common/middleware/config_resolver.py +130 -0
- solace_agent_mesh/common/middleware/middleware_llm.txt +174 -0
- solace_agent_mesh/common/middleware/registry.py +125 -0
- solace_agent_mesh/common/server/__init__.py +4 -0
- solace_agent_mesh/common/server/server.py +122 -0
- solace_agent_mesh/common/server/server_llm.txt +169 -0
- solace_agent_mesh/common/server/task_manager.py +291 -0
- solace_agent_mesh/common/server/utils.py +28 -0
- solace_agent_mesh/common/services/__init__.py +4 -0
- solace_agent_mesh/common/services/employee_service.py +162 -0
- solace_agent_mesh/common/services/identity_service.py +129 -0
- solace_agent_mesh/common/services/providers/__init__.py +4 -0
- solace_agent_mesh/common/services/providers/local_file_identity_service.py +148 -0
- solace_agent_mesh/common/services/providers/providers_llm.txt +113 -0
- solace_agent_mesh/common/services/services_llm.txt +132 -0
- solace_agent_mesh/common/types.py +411 -0
- solace_agent_mesh/common/utils/__init__.py +7 -0
- solace_agent_mesh/common/utils/asyncio_macos_fix.py +86 -0
- solace_agent_mesh/common/utils/embeds/__init__.py +33 -0
- solace_agent_mesh/common/utils/embeds/constants.py +55 -0
- solace_agent_mesh/common/utils/embeds/converter.py +452 -0
- solace_agent_mesh/common/utils/embeds/embeds_llm.txt +124 -0
- solace_agent_mesh/common/utils/embeds/evaluators.py +394 -0
- solace_agent_mesh/common/utils/embeds/modifiers.py +816 -0
- solace_agent_mesh/common/utils/embeds/resolver.py +865 -0
- solace_agent_mesh/common/utils/embeds/types.py +14 -0
- solace_agent_mesh/common/utils/in_memory_cache.py +108 -0
- solace_agent_mesh/common/utils/log_formatters.py +44 -0
- solace_agent_mesh/common/utils/mime_helpers.py +106 -0
- solace_agent_mesh/common/utils/push_notification_auth.py +134 -0
- solace_agent_mesh/common/utils/utils_llm.txt +67 -0
- solace_agent_mesh/config_portal/backend/common.py +66 -24
- solace_agent_mesh/config_portal/backend/plugin_catalog/constants.py +23 -0
- solace_agent_mesh/config_portal/backend/plugin_catalog/models.py +49 -0
- solace_agent_mesh/config_portal/backend/plugin_catalog/registry_manager.py +160 -0
- solace_agent_mesh/config_portal/backend/plugin_catalog/scraper.py +525 -0
- solace_agent_mesh/config_portal/backend/plugin_catalog_server.py +216 -0
- solace_agent_mesh/config_portal/backend/server.py +550 -181
- solace_agent_mesh/config_portal/frontend/static/client/assets/_index-DNxCwAGB.js +48 -0
- solace_agent_mesh/config_portal/frontend/static/client/assets/components-B7lKcHVY.js +140 -0
- solace_agent_mesh/config_portal/frontend/static/client/assets/{entry.client-DX1misIU.js → entry.client-CEumGClk.js} +3 -3
- solace_agent_mesh/config_portal/frontend/static/client/assets/index-DSo1AH_7.js +68 -0
- solace_agent_mesh/config_portal/frontend/static/client/assets/manifest-d2b54a97.js +1 -0
- solace_agent_mesh/config_portal/frontend/static/client/assets/{root-BApq5dPK.js → root-C4XmHinv.js} +2 -2
- solace_agent_mesh/config_portal/frontend/static/client/assets/root-DxRwaWiE.css +1 -0
- solace_agent_mesh/config_portal/frontend/static/client/index.html +3 -3
- solace_agent_mesh/core_a2a/__init__.py +1 -0
- solace_agent_mesh/core_a2a/core_a2a_llm.txt +88 -0
- solace_agent_mesh/core_a2a/service.py +331 -0
- solace_agent_mesh/evaluation/config_loader.py +657 -0
- solace_agent_mesh/evaluation/evaluator.py +667 -0
- solace_agent_mesh/evaluation/message_organizer.py +568 -0
- solace_agent_mesh/evaluation/report/benchmark_info.html +35 -0
- solace_agent_mesh/evaluation/report/chart_section.html +141 -0
- solace_agent_mesh/evaluation/report/detailed_breakdown.html +28 -0
- solace_agent_mesh/evaluation/report/modal.html +59 -0
- solace_agent_mesh/evaluation/report/modal_chart_functions.js +411 -0
- solace_agent_mesh/evaluation/report/modal_script.js +296 -0
- solace_agent_mesh/evaluation/report/modal_styles.css +340 -0
- solace_agent_mesh/evaluation/report/performance_metrics_styles.css +93 -0
- solace_agent_mesh/evaluation/report/templates/footer.html +2 -0
- solace_agent_mesh/evaluation/report/templates/header.html +340 -0
- solace_agent_mesh/evaluation/report_data_processor.py +972 -0
- solace_agent_mesh/evaluation/report_generator.py +613 -0
- solace_agent_mesh/evaluation/run.py +613 -0
- solace_agent_mesh/evaluation/subscriber.py +872 -0
- solace_agent_mesh/evaluation/summary_builder.py +775 -0
- solace_agent_mesh/evaluation/test_case_loader.py +714 -0
- solace_agent_mesh/gateway/base/__init__.py +1 -0
- solace_agent_mesh/gateway/base/app.py +266 -0
- solace_agent_mesh/gateway/base/base_llm.txt +119 -0
- solace_agent_mesh/gateway/base/component.py +1542 -0
- solace_agent_mesh/gateway/base/task_context.py +74 -0
- solace_agent_mesh/gateway/gateway_llm.txt +125 -0
- solace_agent_mesh/gateway/http_sse/app.py +190 -0
- solace_agent_mesh/gateway/http_sse/component.py +1602 -0
- solace_agent_mesh/gateway/http_sse/components/__init__.py +7 -0
- solace_agent_mesh/gateway/http_sse/components/components_llm.txt +65 -0
- solace_agent_mesh/gateway/http_sse/components/visualization_forwarder_component.py +108 -0
- solace_agent_mesh/gateway/http_sse/dependencies.py +316 -0
- solace_agent_mesh/gateway/http_sse/http_sse_llm.txt +63 -0
- solace_agent_mesh/gateway/http_sse/main.py +442 -0
- solace_agent_mesh/gateway/http_sse/routers/__init__.py +4 -0
- solace_agent_mesh/gateway/http_sse/routers/agents.py +41 -0
- solace_agent_mesh/gateway/http_sse/routers/artifacts.py +821 -0
- solace_agent_mesh/gateway/http_sse/routers/auth.py +212 -0
- solace_agent_mesh/gateway/http_sse/routers/config.py +55 -0
- solace_agent_mesh/gateway/http_sse/routers/people.py +69 -0
- solace_agent_mesh/gateway/http_sse/routers/routers_llm.txt +37 -0
- solace_agent_mesh/gateway/http_sse/routers/sessions.py +80 -0
- solace_agent_mesh/gateway/http_sse/routers/sse.py +138 -0
- solace_agent_mesh/gateway/http_sse/routers/tasks.py +294 -0
- solace_agent_mesh/gateway/http_sse/routers/users.py +59 -0
- solace_agent_mesh/gateway/http_sse/routers/visualization.py +1131 -0
- solace_agent_mesh/gateway/http_sse/services/__init__.py +4 -0
- solace_agent_mesh/gateway/http_sse/services/agent_service.py +69 -0
- solace_agent_mesh/gateway/http_sse/services/people_service.py +158 -0
- solace_agent_mesh/gateway/http_sse/services/services_llm.txt +179 -0
- solace_agent_mesh/gateway/http_sse/services/task_service.py +121 -0
- solace_agent_mesh/gateway/http_sse/session_manager.py +187 -0
- solace_agent_mesh/gateway/http_sse/sse_manager.py +328 -0
- solace_agent_mesh/llm.txt +228 -0
- solace_agent_mesh/llm_detail.txt +2835 -0
- solace_agent_mesh/templates/agent_template.yaml +53 -0
- solace_agent_mesh/templates/eval_backend_template.yaml +54 -0
- solace_agent_mesh/templates/gateway_app_template.py +73 -0
- solace_agent_mesh/templates/gateway_component_template.py +400 -0
- solace_agent_mesh/templates/gateway_config_template.yaml +43 -0
- solace_agent_mesh/templates/main_orchestrator.yaml +55 -0
- solace_agent_mesh/templates/plugin_agent_config_template.yaml +122 -0
- solace_agent_mesh/templates/plugin_custom_config_template.yaml +27 -0
- solace_agent_mesh/templates/plugin_custom_template.py +10 -0
- solace_agent_mesh/templates/plugin_gateway_config_template.yaml +63 -0
- solace_agent_mesh/templates/plugin_pyproject_template.toml +33 -0
- solace_agent_mesh/templates/plugin_readme_template.md +34 -0
- solace_agent_mesh/templates/plugin_tools_template.py +224 -0
- solace_agent_mesh/templates/shared_config.yaml +66 -0
- solace_agent_mesh/templates/templates_llm.txt +147 -0
- solace_agent_mesh/templates/webui.yaml +53 -0
- solace_agent_mesh-1.0.1.dist-info/METADATA +432 -0
- solace_agent_mesh-1.0.1.dist-info/RECORD +359 -0
- solace_agent_mesh-1.0.1.dist-info/entry_points.txt +3 -0
- {solace_agent_mesh-0.2.4.dist-info → solace_agent_mesh-1.0.1.dist-info}/licenses/LICENSE +1 -1
- solace_agent_mesh/agents/base_agent_component.py +0 -256
- solace_agent_mesh/agents/global/actions/agent_state_change.py +0 -54
- solace_agent_mesh/agents/global/actions/clear_history.py +0 -32
- solace_agent_mesh/agents/global/actions/convert_file_to_markdown.py +0 -160
- solace_agent_mesh/agents/global/actions/create_file.py +0 -70
- solace_agent_mesh/agents/global/actions/error_action.py +0 -45
- solace_agent_mesh/agents/global/actions/plantuml_diagram.py +0 -163
- solace_agent_mesh/agents/global/actions/plotly_graph.py +0 -152
- solace_agent_mesh/agents/global/actions/retrieve_file.py +0 -51
- solace_agent_mesh/agents/global/global_agent_component.py +0 -38
- solace_agent_mesh/agents/image_processing/actions/create_image.py +0 -75
- solace_agent_mesh/agents/image_processing/actions/describe_image.py +0 -115
- solace_agent_mesh/agents/image_processing/image_processing_agent_component.py +0 -23
- solace_agent_mesh/agents/slack/__init__.py +0 -1
- solace_agent_mesh/agents/slack/actions/__init__.py +0 -1
- solace_agent_mesh/agents/slack/actions/post_message.py +0 -177
- solace_agent_mesh/agents/slack/slack_agent_component.py +0 -59
- solace_agent_mesh/agents/web_request/actions/do_image_search.py +0 -84
- solace_agent_mesh/agents/web_request/actions/do_news_search.py +0 -47
- solace_agent_mesh/agents/web_request/actions/do_suggestion_search.py +0 -34
- solace_agent_mesh/agents/web_request/actions/do_web_request.py +0 -135
- solace_agent_mesh/agents/web_request/actions/download_file.py +0 -69
- solace_agent_mesh/agents/web_request/web_request_agent_component.py +0 -33
- solace_agent_mesh/assets/web-visualizer/assets/index-D0qORgkg.css +0 -1
- solace_agent_mesh/assets/web-visualizer/assets/index-DnDr1pnu.js +0 -109
- solace_agent_mesh/assets/web-visualizer/index.html +0 -14
- solace_agent_mesh/assets/web-visualizer/vite.svg +0 -1
- solace_agent_mesh/cli/commands/add/__init__.py +0 -3
- solace_agent_mesh/cli/commands/add/add.py +0 -88
- solace_agent_mesh/cli/commands/add/agent.py +0 -110
- solace_agent_mesh/cli/commands/add/copy_from_plugin.py +0 -92
- solace_agent_mesh/cli/commands/add/gateway.py +0 -374
- solace_agent_mesh/cli/commands/build.py +0 -670
- solace_agent_mesh/cli/commands/chat/__init__.py +0 -3
- solace_agent_mesh/cli/commands/chat/chat.py +0 -361
- solace_agent_mesh/cli/commands/config.py +0 -29
- solace_agent_mesh/cli/commands/init/__init__.py +0 -3
- solace_agent_mesh/cli/commands/init/ai_provider_step.py +0 -93
- solace_agent_mesh/cli/commands/init/broker_step.py +0 -99
- solace_agent_mesh/cli/commands/init/builtin_agent_step.py +0 -83
- solace_agent_mesh/cli/commands/init/check_if_already_done.py +0 -13
- solace_agent_mesh/cli/commands/init/create_config_file_step.py +0 -65
- solace_agent_mesh/cli/commands/init/create_other_project_files_step.py +0 -147
- solace_agent_mesh/cli/commands/init/file_service_step.py +0 -73
- solace_agent_mesh/cli/commands/init/init.py +0 -92
- solace_agent_mesh/cli/commands/init/project_structure_step.py +0 -16
- solace_agent_mesh/cli/commands/init/web_init_step.py +0 -32
- solace_agent_mesh/cli/commands/plugin/__init__.py +0 -3
- solace_agent_mesh/cli/commands/plugin/add.py +0 -100
- solace_agent_mesh/cli/commands/plugin/build.py +0 -268
- solace_agent_mesh/cli/commands/plugin/create.py +0 -117
- solace_agent_mesh/cli/commands/plugin/plugin.py +0 -124
- solace_agent_mesh/cli/commands/plugin/remove.py +0 -73
- solace_agent_mesh/cli/commands/run.py +0 -68
- solace_agent_mesh/cli/commands/visualizer.py +0 -138
- solace_agent_mesh/cli/config.py +0 -85
- solace_agent_mesh/common/action.py +0 -91
- solace_agent_mesh/common/action_list.py +0 -37
- solace_agent_mesh/common/action_response.py +0 -340
- solace_agent_mesh/common/mysql_database.py +0 -40
- solace_agent_mesh/common/postgres_database.py +0 -85
- solace_agent_mesh/common/prompt_templates.py +0 -28
- solace_agent_mesh/common/stimulus_utils.py +0 -152
- solace_agent_mesh/common/time.py +0 -24
- solace_agent_mesh/common/utils.py +0 -712
- solace_agent_mesh/config_portal/frontend/static/client/assets/_index-a-zJ6rLx.js +0 -46
- solace_agent_mesh/config_portal/frontend/static/client/assets/components-ZIfdTbrV.js +0 -191
- solace_agent_mesh/config_portal/frontend/static/client/assets/index-BJHAE5s4.js +0 -17
- solace_agent_mesh/config_portal/frontend/static/client/assets/manifest-44c41103.js +0 -1
- solace_agent_mesh/config_portal/frontend/static/client/assets/root-DX4gQ516.css +0 -1
- solace_agent_mesh/configs/agent_global.yaml +0 -74
- solace_agent_mesh/configs/agent_image_processing.yaml +0 -82
- solace_agent_mesh/configs/agent_slack.yaml +0 -64
- solace_agent_mesh/configs/agent_web_request.yaml +0 -75
- solace_agent_mesh/configs/conversation_to_file.yaml +0 -56
- solace_agent_mesh/configs/error_catcher.yaml +0 -56
- solace_agent_mesh/configs/monitor.yaml +0 -0
- solace_agent_mesh/configs/monitor_stim_and_errors_to_slack.yaml +0 -109
- solace_agent_mesh/configs/monitor_user_feedback.yaml +0 -58
- solace_agent_mesh/configs/orchestrator.yaml +0 -241
- solace_agent_mesh/configs/service_embedding.yaml +0 -81
- solace_agent_mesh/configs/service_llm.yaml +0 -265
- solace_agent_mesh/configs/visualize_websocket.yaml +0 -55
- solace_agent_mesh/gateway/components/gateway_base.py +0 -47
- solace_agent_mesh/gateway/components/gateway_input.py +0 -278
- solace_agent_mesh/gateway/components/gateway_output.py +0 -298
- solace_agent_mesh/gateway/identity/bamboohr_identity.py +0 -18
- solace_agent_mesh/gateway/identity/identity_base.py +0 -10
- solace_agent_mesh/gateway/identity/identity_provider.py +0 -60
- solace_agent_mesh/gateway/identity/no_identity.py +0 -9
- solace_agent_mesh/gateway/identity/passthru_identity.py +0 -9
- solace_agent_mesh/monitors/base_monitor_component.py +0 -26
- solace_agent_mesh/monitors/feedback/user_feedback_monitor.py +0 -75
- solace_agent_mesh/monitors/stim_and_errors/stim_and_error_monitor.py +0 -560
- solace_agent_mesh/orchestrator/__init__.py +0 -0
- solace_agent_mesh/orchestrator/action_manager.py +0 -237
- solace_agent_mesh/orchestrator/components/__init__.py +0 -0
- solace_agent_mesh/orchestrator/components/orchestrator_action_manager_timeout_component.py +0 -58
- solace_agent_mesh/orchestrator/components/orchestrator_action_response_component.py +0 -179
- solace_agent_mesh/orchestrator/components/orchestrator_register_component.py +0 -107
- solace_agent_mesh/orchestrator/components/orchestrator_stimulus_processor_component.py +0 -527
- solace_agent_mesh/orchestrator/components/orchestrator_streaming_output_component.py +0 -260
- solace_agent_mesh/orchestrator/orchestrator_main.py +0 -172
- solace_agent_mesh/orchestrator/orchestrator_prompt.py +0 -539
- solace_agent_mesh/services/__init__.py +0 -0
- solace_agent_mesh/services/authorization/providers/base_authorization_provider.py +0 -56
- solace_agent_mesh/services/bamboo_hr_service/__init__.py +0 -3
- solace_agent_mesh/services/bamboo_hr_service/bamboo_hr.py +0 -182
- solace_agent_mesh/services/common/__init__.py +0 -4
- solace_agent_mesh/services/common/auto_expiry.py +0 -45
- solace_agent_mesh/services/common/singleton.py +0 -18
- solace_agent_mesh/services/file_service/__init__.py +0 -14
- solace_agent_mesh/services/file_service/file_manager/__init__.py +0 -0
- solace_agent_mesh/services/file_service/file_manager/bucket_file_manager.py +0 -149
- solace_agent_mesh/services/file_service/file_manager/file_manager_base.py +0 -162
- solace_agent_mesh/services/file_service/file_manager/memory_file_manager.py +0 -64
- solace_agent_mesh/services/file_service/file_manager/volume_file_manager.py +0 -106
- solace_agent_mesh/services/file_service/file_service.py +0 -437
- solace_agent_mesh/services/file_service/file_service_constants.py +0 -54
- solace_agent_mesh/services/file_service/file_transformations.py +0 -141
- solace_agent_mesh/services/file_service/file_utils.py +0 -324
- solace_agent_mesh/services/file_service/transformers/__init__.py +0 -5
- solace_agent_mesh/services/history_service/__init__.py +0 -3
- solace_agent_mesh/services/history_service/history_providers/__init__.py +0 -0
- solace_agent_mesh/services/history_service/history_providers/base_history_provider.py +0 -54
- solace_agent_mesh/services/history_service/history_providers/file_history_provider.py +0 -74
- solace_agent_mesh/services/history_service/history_providers/index.py +0 -40
- solace_agent_mesh/services/history_service/history_providers/memory_history_provider.py +0 -33
- solace_agent_mesh/services/history_service/history_providers/mongodb_history_provider.py +0 -66
- solace_agent_mesh/services/history_service/history_providers/redis_history_provider.py +0 -66
- solace_agent_mesh/services/history_service/history_providers/sql_history_provider.py +0 -93
- solace_agent_mesh/services/history_service/history_service.py +0 -413
- solace_agent_mesh/services/history_service/long_term_memory/__init__.py +0 -0
- solace_agent_mesh/services/history_service/long_term_memory/long_term_memory.py +0 -399
- solace_agent_mesh/services/llm_service/components/llm_request_component.py +0 -340
- solace_agent_mesh/services/llm_service/components/llm_service_component_base.py +0 -152
- solace_agent_mesh/services/middleware_service/__init__.py +0 -0
- solace_agent_mesh/services/middleware_service/middleware_service.py +0 -20
- solace_agent_mesh/templates/action.py +0 -38
- solace_agent_mesh/templates/agent.py +0 -29
- solace_agent_mesh/templates/agent.yaml +0 -70
- solace_agent_mesh/templates/gateway-config-template.yaml +0 -6
- solace_agent_mesh/templates/gateway-default-config.yaml +0 -28
- solace_agent_mesh/templates/gateway-flows.yaml +0 -78
- solace_agent_mesh/templates/gateway-header.yaml +0 -16
- solace_agent_mesh/templates/gateway_base.py +0 -15
- solace_agent_mesh/templates/gateway_input.py +0 -98
- solace_agent_mesh/templates/gateway_output.py +0 -71
- solace_agent_mesh/templates/plugin-gateway-default-config.yaml +0 -29
- solace_agent_mesh/templates/plugin-pyproject.toml +0 -30
- solace_agent_mesh/templates/rest-api-default-config.yaml +0 -31
- solace_agent_mesh/templates/rest-api-flows.yaml +0 -81
- solace_agent_mesh/templates/slack-default-config.yaml +0 -16
- solace_agent_mesh/templates/slack-flows.yaml +0 -81
- solace_agent_mesh/templates/solace-agent-mesh-default.yaml +0 -86
- solace_agent_mesh/templates/solace-agent-mesh-plugin-default.yaml +0 -8
- solace_agent_mesh/templates/web-default-config.yaml +0 -10
- solace_agent_mesh/templates/web-flows.yaml +0 -76
- solace_agent_mesh/tools/__init__.py +0 -0
- solace_agent_mesh/tools/components/__init__.py +0 -0
- solace_agent_mesh/tools/components/conversation_formatter.py +0 -111
- solace_agent_mesh/tools/components/file_resolver_component.py +0 -58
- solace_agent_mesh/tools/config/runtime_config.py +0 -26
- solace_agent_mesh-0.2.4.dist-info/METADATA +0 -176
- solace_agent_mesh-0.2.4.dist-info/RECORD +0 -193
- solace_agent_mesh-0.2.4.dist-info/entry_points.txt +0 -3
- /solace_agent_mesh/{agents → agent}/__init__.py +0 -0
- /solace_agent_mesh/{agents/global → agent/adk}/__init__.py +0 -0
- /solace_agent_mesh/{agents/global/actions → agent/protocol}/__init__.py +0 -0
- /solace_agent_mesh/{agents/image_processing → agent/sac}/__init__.py +0 -0
- /solace_agent_mesh/{agents/image_processing/actions → agent/utils}/__init__.py +0 -0
- /solace_agent_mesh/{agents/web_request → config_portal/backend/plugin_catalog}/__init__.py +0 -0
- /solace_agent_mesh/{agents/web_request/actions → evaluation}/__init__.py +0 -0
- /solace_agent_mesh/gateway/{components → http_sse}/__init__.py +0 -0
- {solace_agent_mesh-0.2.4.dist-info → solace_agent_mesh-1.0.1.dist-info}/WHEEL +0 -0
|
@@ -0,0 +1,3388 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Custom Solace AI Connector Component to Host Google ADK Agents via A2A Protocol.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from typing import Any, Dict, Optional, Union, Callable, List, Tuple, TYPE_CHECKING
|
|
6
|
+
import asyncio
|
|
7
|
+
import functools
|
|
8
|
+
import threading
|
|
9
|
+
import concurrent.futures
|
|
10
|
+
import fnmatch
|
|
11
|
+
import base64
|
|
12
|
+
from datetime import datetime, timezone
|
|
13
|
+
import json
|
|
14
|
+
from solace_ai_connector.components.component_base import ComponentBase
|
|
15
|
+
from solace_ai_connector.common.message import (
|
|
16
|
+
Message as SolaceMessage,
|
|
17
|
+
)
|
|
18
|
+
from solace_ai_connector.common.log import log
|
|
19
|
+
from solace_ai_connector.common.event import Event, EventType
|
|
20
|
+
from solace_ai_connector.common.utils import import_module
|
|
21
|
+
import inspect
|
|
22
|
+
from pydantic import BaseModel, ValidationError
|
|
23
|
+
from google.adk.agents.invocation_context import (
|
|
24
|
+
LlmCallsLimitExceededError,
|
|
25
|
+
)
|
|
26
|
+
from google.adk.agents import RunConfig
|
|
27
|
+
from google.adk.agents.run_config import StreamingMode
|
|
28
|
+
from google.adk.sessions import BaseSessionService
|
|
29
|
+
from google.adk.artifacts import BaseArtifactService
|
|
30
|
+
from google.adk.memory import BaseMemoryService
|
|
31
|
+
from google.adk.agents import LlmAgent
|
|
32
|
+
from google.adk.runners import Runner
|
|
33
|
+
from google.adk.models import LlmResponse
|
|
34
|
+
from google.adk.agents.readonly_context import ReadonlyContext
|
|
35
|
+
from google.adk.events import Event as ADKEvent
|
|
36
|
+
from google.adk.agents.callback_context import CallbackContext
|
|
37
|
+
from google.adk.models.llm_request import LlmRequest
|
|
38
|
+
from google.genai import types as adk_types
|
|
39
|
+
from google.adk.tools.mcp_tool import MCPToolset
|
|
40
|
+
from ...common.types import (
|
|
41
|
+
AgentCard,
|
|
42
|
+
Task,
|
|
43
|
+
TaskStatus,
|
|
44
|
+
TaskState,
|
|
45
|
+
Message as A2AMessage,
|
|
46
|
+
TextPart,
|
|
47
|
+
FilePart,
|
|
48
|
+
DataPart,
|
|
49
|
+
FileContent,
|
|
50
|
+
Artifact as A2AArtifact,
|
|
51
|
+
JSONRPCResponse,
|
|
52
|
+
InternalError,
|
|
53
|
+
TaskStatusUpdateEvent,
|
|
54
|
+
TaskArtifactUpdateEvent,
|
|
55
|
+
SendTaskRequest,
|
|
56
|
+
)
|
|
57
|
+
from ...common.a2a_protocol import (
|
|
58
|
+
get_a2a_base_topic,
|
|
59
|
+
get_discovery_topic,
|
|
60
|
+
get_agent_request_topic,
|
|
61
|
+
get_agent_response_topic,
|
|
62
|
+
get_client_response_topic,
|
|
63
|
+
get_peer_agent_status_topic,
|
|
64
|
+
format_and_route_adk_event,
|
|
65
|
+
get_gateway_status_topic,
|
|
66
|
+
)
|
|
67
|
+
from ...agent.utils.config_parser import resolve_instruction_provider
|
|
68
|
+
from ...agent.utils.artifact_helpers import get_latest_artifact_version
|
|
69
|
+
from ...agent.adk.services import (
|
|
70
|
+
initialize_session_service,
|
|
71
|
+
initialize_artifact_service,
|
|
72
|
+
initialize_memory_service,
|
|
73
|
+
)
|
|
74
|
+
from ...agent.adk.setup import (
|
|
75
|
+
load_adk_tools,
|
|
76
|
+
initialize_adk_agent,
|
|
77
|
+
initialize_adk_runner,
|
|
78
|
+
)
|
|
79
|
+
from ...agent.protocol.event_handlers import (
|
|
80
|
+
process_event,
|
|
81
|
+
publish_agent_card,
|
|
82
|
+
)
|
|
83
|
+
from ...agent.adk.runner import run_adk_async_task_thread_wrapper, TaskCancelledError
|
|
84
|
+
from ...agent.tools.peer_agent_tool import (
|
|
85
|
+
CORRELATION_DATA_PREFIX,
|
|
86
|
+
PeerAgentTool,
|
|
87
|
+
PEER_TOOL_PREFIX,
|
|
88
|
+
)
|
|
89
|
+
from ...agent.adk.invocation_monitor import InvocationMonitor
|
|
90
|
+
from ...common.middleware.registry import MiddlewareRegistry
|
|
91
|
+
from ...common.constants import DEFAULT_COMMUNICATION_TIMEOUT
|
|
92
|
+
from ...agent.tools.registry import tool_registry
|
|
93
|
+
|
|
94
|
+
if TYPE_CHECKING:
|
|
95
|
+
from .task_execution_context import TaskExecutionContext
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
# SAC component metadata. Configuration is validated at the app level
# (SamAgentApp.app_schema), so config_parameters is intentionally empty and
# the input/output schemas are placeholders: this component is event-driven
# and publishes its results to Solace rather than returning them.
info = dict(
    class_name="SamAgentComponent",
    description=(
        "Hosts a Google ADK agent and bridges communication via the A2A protocol over Solace. "
        "NOTE: Configuration is defined in the app-level 'app_config' block "
        "and validated by 'SamAgentApp.app_schema' when using the associated App class."
    ),
    config_parameters=[],
    input_schema=dict(
        type="object",
        description="Not typically used; component reacts to events.",
        properties={},
    ),
    output_schema=dict(
        type="object",
        description="Not typically used; component publishes results to Solace.",
        properties={},
    ),
)
|
|
117
|
+
# Type alias for a callable that produces the agent's instruction text from a
# read-only ADK invocation context (dynamic alternative to a static string).
InstructionProvider = Callable[[ReadonlyContext], str]
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
class SamAgentComponent(ComponentBase):
|
|
121
|
+
"""
|
|
122
|
+
A Solace AI Connector component that hosts a Google ADK agent,
|
|
123
|
+
communicating via the A2A protocol over Solace.
|
|
124
|
+
"""
|
|
125
|
+
|
|
126
|
+
CORRELATION_DATA_PREFIX = CORRELATION_DATA_PREFIX
|
|
127
|
+
HOST_COMPONENT_VERSION = "1.0.0-alpha"
|
|
128
|
+
|
|
129
|
+
def __init__(self, **kwargs):
    """
    Initializes the A2A_ADK_HostComponent.

    Reads the validated app-level configuration, initializes the synchronous
    ADK services (session/artifact/memory), optionally runs a user-supplied
    agent init function, then starts a dedicated asyncio thread on which the
    asynchronous ADK/MCP initialization is performed before returning.

    Args:
        **kwargs: Configuration parameters passed from the SAC framework.
                  Expects configuration under app_config.

    Raises:
        ValueError: If required configuration is missing or invalid.
        RuntimeError: If custom init, service init, or async init fails.
    """
    # Derive the SAC component name from the configured agent_name (if
    # present) before ComponentBase.__init__ consumes the kwargs.
    if "component_config" in kwargs and "app_config" in kwargs["component_config"]:
        name = kwargs["component_config"]["app_config"].get("agent_name")
        if name:
            kwargs.setdefault("name", name)

    super().__init__(info, **kwargs)
    self.agent_name = self.get_config("agent_name")
    log.info("%s Initializing A2A ADK Host Component...", self.log_identifier)
    try:
        # --- Retrieve and sanity-check app_config values ---------------
        self.namespace = self.get_config("namespace")
        if not self.namespace:
            raise ValueError("Internal Error: Namespace missing after validation.")
        self.supports_streaming = self.get_config("supports_streaming", False)
        self.stream_batching_threshold_bytes = self.get_config(
            "stream_batching_threshold_bytes", 0
        )
        # NOTE(review): agent_name was already assigned just above; this
        # repeats the lookup so the missing-name check runs inside the try.
        self.agent_name = self.get_config("agent_name")
        if not self.agent_name:
            raise ValueError("Internal Error: Agent name missing after validation.")
        self.model_config = self.get_config("model")
        if not self.model_config:
            raise ValueError(
                "Internal Error: Model config missing after validation."
            )
        self.instruction_config = self.get_config("instruction", "")
        self.global_instruction_config = self.get_config("global_instruction", "")
        self.tools_config = self.get_config("tools", [])
        self.planner_config = self.get_config("planner")
        self.code_executor_config = self.get_config("code_executor")
        self.session_service_config = self.get_config("session_service")
        if not self.session_service_config:
            raise ValueError(
                "Internal Error: Session service config missing after validation."
            )
        # Session behavior falls back to PERSISTENT on any invalid value.
        self.default_session_behavior = self.session_service_config.get(
            "default_behavior", "PERSISTENT"
        ).upper()
        if self.default_session_behavior not in ["PERSISTENT", "RUN_BASED"]:
            log.warning(
                "%s Invalid 'default_behavior' in session_service_config: '%s'. Defaulting to PERSISTENT.",
                self.log_identifier,
                self.default_session_behavior,
            )
            self.default_session_behavior = "PERSISTENT"
        log.info(
            "%s Default session behavior set to: %s",
            self.log_identifier,
            self.default_session_behavior,
        )
        self.artifact_service_config = self.get_config(
            "artifact_service", {"type": "memory"}
        )
        self.memory_service_config = self.get_config(
            "memory_service", {"type": "memory"}
        )
        # Artifact handling falls back to 'ignore' on any invalid value.
        self.artifact_handling_mode = self.get_config(
            "artifact_handling_mode", "ignore"
        ).lower()
        if self.artifact_handling_mode not in ["ignore", "embed", "reference"]:
            log.warning(
                "%s Invalid artifact_handling_mode '%s'. Defaulting to 'ignore'.",
                self.log_identifier,
                self.artifact_handling_mode,
            )
            self.artifact_handling_mode = "ignore"
        log.info(
            "%s Artifact Handling Mode: %s",
            self.log_identifier,
            self.artifact_handling_mode,
        )
        if self.artifact_handling_mode == "reference":
            log.warning(
                "%s Artifact handling mode 'reference' selected, but this component does not currently host an endpoint to serve artifacts. Clients may not be able to retrieve referenced artifacts.",
                self.log_identifier,
            )
        self.agent_card_config = self.get_config("agent_card")
        if not self.agent_card_config:
            raise ValueError(
                "Internal Error: Agent card config missing after validation."
            )
        self.agent_card_publishing_config = self.get_config("agent_card_publishing")
        if not self.agent_card_publishing_config:
            raise ValueError(
                "Internal Error: Agent card publishing config missing after validation."
            )
        self.agent_discovery_config = self.get_config("agent_discovery")
        if not self.agent_discovery_config:
            raise ValueError(
                "Internal Error: Agent discovery config missing after validation."
            )
        self.inter_agent_communication_config = self.get_config(
            "inter_agent_communication"
        )
        if not self.inter_agent_communication_config:
            raise ValueError(
                "Internal Error: Inter-agent comms config missing after validation."
            )
        log.info("%s Configuration retrieved successfully.", self.log_identifier)
    except Exception as e:
        log.error(
            "%s Failed to retrieve configuration via get_config: %s",
            self.log_identifier,
            e,
        )
        raise ValueError(f"Configuration retrieval error: {e}") from e
    # --- Runtime state (populated during the init phases below) --------
    self.session_service: BaseSessionService = None
    self.artifact_service: BaseArtifactService = None
    self.memory_service: BaseMemoryService = None
    self.adk_agent: LlmAgent = None
    self.runner: Runner = None
    self.agent_card_tool_manifest: List[Dict[str, Any]] = []
    # Discovered peer agents, keyed by agent name (values are AgentCards).
    self.peer_agents: Dict[str, Any] = {}
    self._card_publish_timer_id: str = f"publish_card_{self.agent_name}"
    self._async_loop = None
    self._async_thread = None
    self._async_init_future = None
    self.peer_response_queues: Dict[str, asyncio.Queue] = {}
    self.peer_response_queue_lock = threading.Lock()
    self.agent_specific_state: Dict[str, Any] = {}
    # Active A2A tasks keyed by logical task id; guarded by active_tasks_lock.
    self.active_tasks: Dict[str, "TaskExecutionContext"] = {}
    self.active_tasks_lock = threading.Lock()
    self._agent_system_instruction_string: Optional[str] = None
    self._agent_system_instruction_callback: Optional[
        Callable[[CallbackContext, LlmRequest], Optional[str]]
    ] = None
    self.invocation_monitor: Optional[InvocationMonitor] = None
    self._active_background_tasks = set()
    try:
        self.agent_specific_state: Dict[str, Any] = {}
        # --- Optional user-provided agent init function ------------------
        # Loaded dynamically from YAML config; may declare a Pydantic model
        # as its second parameter, in which case the YAML 'config' dict is
        # validated against that model before the call.
        init_func_details = self.get_config("agent_init_function")
        if init_func_details and isinstance(init_func_details, dict):
            module_name = init_func_details.get("module")
            func_name = init_func_details.get("name")
            base_path = init_func_details.get("base_path")
            specific_init_params_dict = init_func_details.get("config", {})
            if module_name and func_name:
                log.info(
                    "%s Attempting to load init_function: %s.%s",
                    self.log_identifier,
                    module_name,
                    func_name,
                )
                try:
                    module = import_module(module_name, base_path=base_path)
                    init_function = getattr(module, func_name)
                    if not callable(init_function):
                        raise TypeError(
                            f"Init function '{func_name}' in module '{module_name}' is not callable."
                        )
                    sig = inspect.signature(init_function)
                    pydantic_config_model = None
                    config_param_name = None
                    validated_config_arg = specific_init_params_dict
                    # Find the first parameter annotated with a Pydantic model.
                    for param_name_sig, param_sig in sig.parameters.items():
                        if (
                            param_sig.annotation is not inspect.Parameter.empty
                            and isinstance(param_sig.annotation, type)
                            and issubclass(param_sig.annotation, BaseModel)
                        ):
                            pydantic_config_model = param_sig.annotation
                            config_param_name = param_name_sig
                            break
                    if pydantic_config_model and config_param_name:
                        log.info(
                            "%s Found Pydantic config model '%s' for init_function parameter '%s'.",
                            self.log_identifier,
                            pydantic_config_model.__name__,
                            config_param_name,
                        )
                        try:
                            validated_config_arg = pydantic_config_model(
                                **specific_init_params_dict
                            )
                        except ValidationError as ve:
                            log.error(
                                "%s Validation error for init_function config using Pydantic model '%s': %s",
                                self.log_identifier,
                                pydantic_config_model.__name__,
                                ve,
                            )
                            raise ValueError(
                                f"Invalid configuration for init_function '{func_name}': {ve}"
                            ) from ve
                    # NOTE(review): this elif reads the for-loop variable
                    # 'param_sig' after the loop, and 'config_param_name' is
                    # only ever set together with 'pydantic_config_model', so
                    # this branch looks unreachable — confirm intent.
                    elif (
                        config_param_name
                        and param_sig.annotation is not inspect.Parameter.empty
                    ):
                        log.warning(
                            "%s Config parameter '%s' for init_function '%s' has a type hint '%s', but it's not a Pydantic BaseModel. Passing raw dict.",
                            self.log_identifier,
                            config_param_name,
                            func_name,
                            param_sig.annotation,
                        )
                    else:
                        log.info(
                            "%s No Pydantic model type hint found for a config parameter of init_function '%s'. Passing raw dict if a config param exists, or only host_component.",
                            self.log_identifier,
                            func_name,
                        )
                    # Dispatch on arity: (host) or (host, config) only.
                    func_params_list = list(sig.parameters.values())
                    num_actual_params = len(func_params_list)
                    if num_actual_params == 1:
                        if specific_init_params_dict:
                            log.warning(
                                "%s Init function '%s' takes 1 argument, but 'config' was provided in YAML. Config will be ignored.",
                                self.log_identifier,
                                func_name,
                            )
                        init_function(self)
                    elif num_actual_params == 2:
                        # Pass the config under whatever name the function used.
                        actual_config_param_name_in_signature = func_params_list[
                            1
                        ].name
                        init_function(
                            self,
                            **{
                                actual_config_param_name_in_signature: validated_config_arg
                            },
                        )
                    else:
                        raise TypeError(
                            f"Init function '{func_name}' has an unsupported signature. "
                            f"Expected (host_component_instance) or (host_component_instance, config_param), "
                            f"but got {num_actual_params} parameters."
                        )
                    log.info(
                        "%s Successfully executed init_function: %s.%s",
                        self.log_identifier,
                        module_name,
                        func_name,
                    )
                except Exception as e:
                    log.exception(
                        "%s Fatal error during agent initialization via init_function '%s.%s': %s",
                        self.log_identifier,
                        module_name,
                        func_name,
                        e,
                    )
                    raise RuntimeError(
                        f"Agent custom initialization failed: {e}"
                    ) from e
        # --- Invocation monitor (best-effort: failure is non-fatal) ------
        try:
            self.invocation_monitor = InvocationMonitor()
        except Exception as im_e:
            log.error(
                "%s Failed to initialize InvocationMonitor: %s",
                self.log_identifier,
                im_e,
            )
            self.invocation_monitor = None
        # --- Synchronous ADK services (session/artifact/memory) ----------
        try:
            log.info(
                "%s Initializing synchronous ADK services...", self.log_identifier
            )
            self.session_service = initialize_session_service(self)
            self.artifact_service = initialize_artifact_service(self)
            self.memory_service = initialize_memory_service(self)
            log.info(
                "%s Synchronous ADK services initialized.", self.log_identifier
            )
        except Exception as service_err:
            log.exception(
                "%s Failed to initialize synchronous ADK services: %s",
                self.log_identifier,
                service_err,
            )
            raise RuntimeError(
                f"Failed to initialize synchronous ADK services: {service_err}"
            ) from service_err
        # --- Dedicated asyncio thread + blocking wait on async init ------
        log.info(
            "%s Starting dedicated async thread for MCP/ADK initialization...",
            self.log_identifier,
        )
        self._async_loop = asyncio.new_event_loop()
        self._async_init_future = concurrent.futures.Future()
        self._async_thread = threading.Thread(
            target=self._start_async_loop, daemon=True
        )
        self._async_thread.start()
        init_coro_future = asyncio.run_coroutine_threadsafe(
            self._perform_async_init(), self._async_loop
        )
        log.info(
            "%s Waiting for async initialization to complete...",
            self.log_identifier,
        )
        try:
            # 60s budget for the async init coroutine itself; the secondary
            # future should already be resolved by then (1s grace).
            init_coro_future.result(timeout=60)
            self._async_init_future.result(timeout=1)
            log.info(
                "%s Async initialization completed successfully.",
                self.log_identifier,
            )
        except Exception as init_err:
            log.error(
                "%s Async initialization failed during __init__: %s",
                self.log_identifier,
                init_err,
            )
            self.cleanup()
            raise RuntimeError(
                f"Failed to initialize component asynchronously: {init_err}"
            ) from init_err
        # --- Periodic agent-card publishing timer -----------------------
        publish_interval_sec = self.agent_card_publishing_config.get(
            "interval_seconds"
        )
        if publish_interval_sec and publish_interval_sec > 0:
            log.info(
                "%s Scheduling agent card publishing every %d seconds.",
                self.log_identifier,
                publish_interval_sec,
            )
            self.add_timer(
                delay_ms=1000,
                timer_id=self._card_publish_timer_id,
                interval_ms=publish_interval_sec * 1000,
            )
        else:
            log.warning(
                "%s Agent card publishing interval not configured or invalid, card will not be published periodically.",
                self.log_identifier,
            )
        log.info(
            "%s Initialization complete for agent: %s",
            self.log_identifier,
            self.agent_name,
        )
    except Exception as e:
        log.exception("%s Initialization failed: %s", self.log_identifier, e)
        raise
|
|
468
|
+
|
|
469
|
+
def invoke(self, message: SolaceMessage, data: dict) -> dict:
    """Unused ComponentBase entry point; all real work happens in process_event."""
    # Reaching this indicates a wiring problem: this component is event-driven
    # and never expects the framework to call 'invoke' directly.
    log.warning(
        "%s 'invoke' method called, but primary logic resides in 'process_event'. This should not happen in normal operation.",
        self.log_identifier,
    )
    return None
|
|
476
|
+
|
|
477
|
+
def process_event(self, event: Event):
    """Dispatch an incoming SAC event onto the component's dedicated async loop.

    The actual handling is done by the module-level ``process_event``
    coroutine from ``event_handlers``; this method only schedules it. If the
    loop is unavailable or scheduling fails, MESSAGE events are NACKed so the
    broker can redeliver them.
    """

    def _nack_if_message(success_fmt: str, failure_fmt: str) -> None:
        # Best-effort NACK: only MESSAGE events carry a broker message.
        if event.event_type != EventType.MESSAGE:
            return
        try:
            event.data.call_negative_acknowledgements()
            log.warning(success_fmt, self.log_identifier)
        except Exception as nack_e:
            log.error(failure_fmt, self.log_identifier, nack_e)

    try:
        loop = self.get_async_loop()
        loop_running = loop.is_running() if loop else False
        if not (loop and loop_running):
            log.error(
                "%s Async loop not available or not running (loop is %s, is_running: %s). Cannot process event: %s",
                self.log_identifier,
                "present" if loop else "None",
                loop_running,
                event.event_type,
            )
            _nack_if_message(
                "%s NACKed message due to unavailable async loop for event processing.",
                "%s Failed to NACK message after async loop issue: %s",
            )
            return
        # 'process_event' here resolves to the imported coroutine function,
        # not this method (method names are not in scope inside the body).
        scheduled = asyncio.run_coroutine_threadsafe(process_event(self, event), loop)
        scheduled.add_done_callback(
            functools.partial(
                self._handle_scheduled_task_completion,
                event_type_for_log=event.event_type,
            )
        )
    except Exception as e:
        log.error(
            "%s Error processing event: %s. Exception: %s",
            self.log_identifier,
            event.event_type,
            e,
        )
        _nack_if_message(
            "%s NACKed message due to error in event processing.",
            "%s Failed to NACK message after error in event processing: %s",
        )
|
|
532
|
+
|
|
533
|
+
def handle_timer_event(self, timer_data: Dict[str, Any]):
    """React to framework timer ticks; only the agent-card publish timer matters."""
    log.debug("%s Received timer event: %s", self.log_identifier, timer_data)
    if timer_data.get("timer_id") != self._card_publish_timer_id:
        return
    publish_agent_card(self)
|
|
538
|
+
|
|
539
|
+
def handle_cache_expiry_event(self, cache_data: Dict[str, Any]):
    """Handles cache expiry events, specifically for peer timeouts.

    Cache keys prefixed with CORRELATION_DATA_PREFIX represent outstanding
    peer sub-task requests; their expiry means the peer never responded in
    time, so the timeout must be recorded via _handle_peer_timeout.

    Args:
        cache_data: Expiry payload with 'key' (the sub-task id) and
            'expired_data' (the correlation data stored at request time).
    """
    log.debug("%s Received cache expiry event: %s", self.log_identifier, cache_data)
    expired_key = cache_data.get("key")
    expired_data = cache_data.get("expired_data")

    if not (expired_key and expired_key.startswith(CORRELATION_DATA_PREFIX)):
        log.debug(
            "%s Cache expiry for key '%s' is not a peer sub-task timeout.",
            self.log_identifier,
            expired_key,
        )
        return

    sub_task_id = expired_key
    log.warning(
        "%s Detected timeout for sub-task ID: %s",
        self.log_identifier,
        sub_task_id,
    )
    if not expired_data:
        log.error(
            "%s Missing expired_data in cache expiry event for sub-task %s. Cannot process timeout.",
            self.log_identifier,
            sub_task_id,
        )
        return

    try:
        original_task_context = expired_data.get("original_task_context")
        if original_task_context:
            # BUG FIX: _handle_peer_timeout is a coroutine function. The
            # previous code called it directly from this synchronous method,
            # which only created a coroutine object that was never awaited —
            # peer timeouts were silently dropped (with a "coroutine was
            # never awaited" RuntimeWarning). Schedule it on the component's
            # dedicated async loop instead.
            loop = self.get_async_loop()
            if loop and loop.is_running():
                asyncio.run_coroutine_threadsafe(
                    self._handle_peer_timeout(sub_task_id, expired_data), loop
                )
            else:
                log.error(
                    "%s Async loop not available. Cannot process timeout for sub-task %s.",
                    self.log_identifier,
                    sub_task_id,
                )
        else:
            log.error(
                "%s Missing 'original_task_context' in expired cache data for sub-task %s. Cannot process timeout.",
                self.log_identifier,
                sub_task_id,
            )
    except Exception as e:
        log.exception(
            "%s Error handling peer timeout for sub-task %s: %s",
            self.log_identifier,
            sub_task_id,
            e,
        )
|
|
582
|
+
|
|
583
|
+
async def _retrigger_agent_with_peer_responses(
    self,
    results_to_inject: list,
    correlation_data: dict,
    task_context: "TaskExecutionContext",
):
    """
    Injects peer tool responses into the session history and re-triggers the ADK runner.
    This function contains the logic to correctly merge parallel tool call responses.

    Args:
        results_to_inject: Collected peer results; each entry carries
            'peer_tool_name', 'payload', and 'adk_function_call_id'.
        correlation_data: Correlation state saved when the peer calls were
            issued ('original_task_context', 'logical_task_id', 'invocation_id').
        task_context: Execution context of the paused logical task; its
            parallel-invocation state is cleared when the re-run finishes.
    """
    original_task_context = correlation_data.get("original_task_context")
    logical_task_id = correlation_data.get("logical_task_id")
    paused_invocation_id = correlation_data.get("invocation_id")
    log_retrigger = f"{self.log_identifier}[RetriggerManager:{logical_task_id}]"

    try:
        # Re-fetch the ADK session the task was paused on.
        effective_session_id = original_task_context.get("effective_session_id")
        user_id = original_task_context.get("user_id")
        session = await self.session_service.get_session(
            app_name=self.agent_name,
            user_id=user_id,
            session_id=effective_session_id,
        )
        if not session:
            raise RuntimeError(
                f"Could not find ADK session '{effective_session_id}'"
            )

        # Wrap each peer result as a function_response part, re-attaching the
        # original function-call id so the LLM can match call and response.
        new_response_parts = []
        for result in results_to_inject:
            part = adk_types.Part.from_function_response(
                name=result["peer_tool_name"],
                response=result["payload"],
            )
            part.function_response.id = result["adk_function_call_id"]
            new_response_parts.append(part)

        # Always create a new event for the incoming peer responses.
        # The ADK's `contents` processor is responsible for merging multiple
        # tool responses into a single message before the next LLM call.
        log.info(
            "%s Creating a new tool response event for %d peer responses.",
            log_retrigger,
            len(new_response_parts),
        )
        new_adk_event = ADKEvent(
            invocation_id=paused_invocation_id,
            author=self.agent_name,
            content=adk_types.Content(role="tool", parts=new_response_parts),
        )
        await self.session_service.append_event(
            session=session, event=new_adk_event
        )

        # Always use SSE streaming mode for the ADK runner, even on re-trigger.
        # This ensures that real-time callbacks for status updates and artifact
        # creation can function correctly for all turns of a task.
        streaming_mode = StreamingMode.SSE
        max_llm_calls = self.get_config("max_llm_calls_per_task", 20)
        run_config = RunConfig(
            streaming_mode=streaming_mode, max_llm_calls=max_llm_calls
        )

        log.info(
            "%s Re-triggering ADK runner for main task %s.",
            log_retrigger,
            logical_task_id,
        )
        try:
            await run_adk_async_task_thread_wrapper(
                self, session, None, run_config, original_task_context
            )
        finally:
            # Clear the parallel bookkeeping even if the re-run raised.
            log.info(
                "%s Cleaning up parallel invocation state for invocation %s.",
                log_retrigger,
                paused_invocation_id,
            )
            task_context.clear_parallel_invocation_state(paused_invocation_id)

    except Exception as e:
        log.exception(
            "%s Failed to re-trigger ADK runner for task %s: %s",
            log_retrigger,
            logical_task_id,
            e,
        )
        # Finalize the task as errored on the async loop; without a loop
        # there is no way to report the failure back to the client.
        if original_task_context:
            loop = self.get_async_loop()
            if loop and loop.is_running():
                asyncio.run_coroutine_threadsafe(
                    self.finalize_task_error(e, original_task_context), loop
                )
            else:
                log.error(
                    "%s Async loop not available. Cannot schedule error finalization for task %s.",
                    log_retrigger,
                    logical_task_id,
                )
|
|
682
|
+
|
|
683
|
+
async def _handle_peer_timeout(
    self,
    sub_task_id: str,
    correlation_data: Dict[str, Any],
):
    """
    Record the timeout of one delegated peer request.

    Updates the owning TaskExecutionContext's completion bookkeeping; when
    every parallel sub-task of the invocation has resolved (response or
    timeout), re-triggers the ADK runner with the gathered results.
    """
    task_id = correlation_data.get("logical_task_id")
    inv_id = correlation_data.get("invocation_id")
    log_id = f"{self.log_identifier}[RetriggerManager:{task_id}]"

    log.warning(
        "%s Peer request timed out for sub-task: %s (Invocation: %s)",
        log_id,
        sub_task_id,
        inv_id,
    )

    with self.active_tasks_lock:
        ctx = self.active_tasks.get(task_id)

    if not ctx:
        # Task already finished/cancelled; nothing left to time out.
        log.warning(
            "%s TaskExecutionContext not found for task %s. Ignoring timeout event.",
            log_id,
            task_id,
        )
        return

    timeout_seconds = self.inter_agent_communication_config.get(
        "request_timeout_seconds", DEFAULT_COMMUNICATION_TIMEOUT
    )
    if not ctx.handle_peer_timeout(
        sub_task_id, correlation_data, timeout_seconds, inv_id
    ):
        # Sibling sub-tasks are still outstanding for this invocation.
        log.info(
            "%s Waiting for more peer responses for invocation %s after timeout of sub-task %s.",
            log_id,
            inv_id,
            sub_task_id,
        )
        return

    log.info(
        "%s All peer responses/timeouts received for invocation %s. Retriggering agent.",
        log_id,
        inv_id,
    )
    gathered_results = ctx.parallel_tool_calls[inv_id].get("results", [])
    await self._retrigger_agent_with_peer_responses(
        gathered_results, correlation_data, ctx
    )
|
|
742
|
+
|
|
743
|
+
def _inject_peer_tools_callback(
|
|
744
|
+
self, callback_context: CallbackContext, llm_request: LlmRequest
|
|
745
|
+
) -> Optional[LlmResponse]:
|
|
746
|
+
"""
|
|
747
|
+
ADK before_model_callback to dynamically add PeerAgentTools to the LLM request
|
|
748
|
+
and generate the corresponding instruction text for the LLM.
|
|
749
|
+
"""
|
|
750
|
+
log.debug("%s Running _inject_peer_tools_callback...", self.log_identifier)
|
|
751
|
+
if not self.peer_agents:
|
|
752
|
+
log.debug("%s No peer agents currently discovered.", self.log_identifier)
|
|
753
|
+
return None
|
|
754
|
+
|
|
755
|
+
a2a_context = callback_context.state.get("a2a_context", {})
|
|
756
|
+
user_config = (
|
|
757
|
+
a2a_context.get("a2a_user_config", {})
|
|
758
|
+
if isinstance(a2a_context, dict)
|
|
759
|
+
else {}
|
|
760
|
+
)
|
|
761
|
+
|
|
762
|
+
inter_agent_config = self.get_config("inter_agent_communication", {})
|
|
763
|
+
allow_list = inter_agent_config.get("allow_list", ["*"])
|
|
764
|
+
deny_list = set(self.get_config("deny_list", []))
|
|
765
|
+
self_name = self.get_config("agent_name")
|
|
766
|
+
|
|
767
|
+
peer_tools_to_add = []
|
|
768
|
+
allowed_peer_descriptions = []
|
|
769
|
+
|
|
770
|
+
for peer_name, agent_card in self.peer_agents.items():
|
|
771
|
+
if not isinstance(agent_card, AgentCard) or peer_name == self_name:
|
|
772
|
+
continue
|
|
773
|
+
|
|
774
|
+
is_allowed = any(
|
|
775
|
+
fnmatch.fnmatch(peer_name, p) for p in allow_list
|
|
776
|
+
) and not any(fnmatch.fnmatch(peer_name, p) for p in deny_list)
|
|
777
|
+
|
|
778
|
+
if is_allowed:
|
|
779
|
+
config_resolver = MiddlewareRegistry.get_config_resolver()
|
|
780
|
+
operation_spec = {
|
|
781
|
+
"operation_type": "peer_delegation",
|
|
782
|
+
"target_agent": peer_name,
|
|
783
|
+
"delegation_context": "peer_discovery",
|
|
784
|
+
}
|
|
785
|
+
validation_context = {
|
|
786
|
+
"discovery_phase": "peer_enumeration",
|
|
787
|
+
"agent_context": {"component_type": "peer_discovery"},
|
|
788
|
+
}
|
|
789
|
+
validation_result = config_resolver.validate_operation_config(
|
|
790
|
+
user_config, operation_spec, validation_context
|
|
791
|
+
)
|
|
792
|
+
if not validation_result.get("valid", True):
|
|
793
|
+
log.debug(
|
|
794
|
+
"%s Peer agent '%s' filtered out by user configuration.",
|
|
795
|
+
self.log_identifier,
|
|
796
|
+
peer_name,
|
|
797
|
+
)
|
|
798
|
+
is_allowed = False
|
|
799
|
+
|
|
800
|
+
if not is_allowed:
|
|
801
|
+
continue
|
|
802
|
+
|
|
803
|
+
try:
|
|
804
|
+
peer_tool_instance = PeerAgentTool(
|
|
805
|
+
target_agent_name=peer_name, host_component=self
|
|
806
|
+
)
|
|
807
|
+
if peer_tool_instance.name not in llm_request.tools_dict:
|
|
808
|
+
peer_tools_to_add.append(peer_tool_instance)
|
|
809
|
+
description = (
|
|
810
|
+
getattr(agent_card, "description", "No description")
|
|
811
|
+
or "No description"
|
|
812
|
+
)
|
|
813
|
+
allowed_peer_descriptions.append(
|
|
814
|
+
f"- `peer_{peer_name}`: {description}"
|
|
815
|
+
)
|
|
816
|
+
except Exception as e:
|
|
817
|
+
log.error(
|
|
818
|
+
"%s Failed to create PeerAgentTool for '%s': %s",
|
|
819
|
+
self.log_identifier,
|
|
820
|
+
peer_name,
|
|
821
|
+
e,
|
|
822
|
+
)
|
|
823
|
+
|
|
824
|
+
if allowed_peer_descriptions:
|
|
825
|
+
peer_list_str = "\n".join(allowed_peer_descriptions)
|
|
826
|
+
instruction_text = (
|
|
827
|
+
"You can delegate tasks to other specialized agents if they are better suited.\n"
|
|
828
|
+
"Use the `peer_<agent_name>(task_description: str, user_query: str)` tool for delegation. "
|
|
829
|
+
"Replace `<agent_name>` with the actual name of the target agent.\n"
|
|
830
|
+
"Provide a clear `task_description` for the peer and include the original `user_query` for context.\n"
|
|
831
|
+
"Be aware that the peer agent may not have access to your session history, so you must provide all required context necessary to fulfill the request.\n\n"
|
|
832
|
+
"Available peer agents you can delegate to (use the `peer_...` tool name):\n"
|
|
833
|
+
f"{peer_list_str}"
|
|
834
|
+
)
|
|
835
|
+
callback_context.state["peer_tool_instructions"] = instruction_text
|
|
836
|
+
log.debug(
|
|
837
|
+
"%s Stored peer tool instructions in callback_context.state.",
|
|
838
|
+
self.log_identifier,
|
|
839
|
+
)
|
|
840
|
+
|
|
841
|
+
if peer_tools_to_add:
|
|
842
|
+
try:
|
|
843
|
+
if llm_request.config.tools is None:
|
|
844
|
+
llm_request.config.tools = []
|
|
845
|
+
if len(llm_request.config.tools) > 0:
|
|
846
|
+
for tool in peer_tools_to_add:
|
|
847
|
+
llm_request.tools_dict[tool.name] = tool
|
|
848
|
+
llm_request.config.tools[0].function_declarations.append(
|
|
849
|
+
tool._get_declaration()
|
|
850
|
+
)
|
|
851
|
+
else:
|
|
852
|
+
llm_request.append_tools(peer_tools_to_add)
|
|
853
|
+
log.debug(
|
|
854
|
+
"%s Dynamically added %d PeerAgentTool(s) to LLM request.",
|
|
855
|
+
self.log_identifier,
|
|
856
|
+
len(peer_tools_to_add),
|
|
857
|
+
)
|
|
858
|
+
except Exception as e:
|
|
859
|
+
log.error(
|
|
860
|
+
"%s Failed to append dynamic peer tools to LLM request: %s",
|
|
861
|
+
self.log_identifier,
|
|
862
|
+
e,
|
|
863
|
+
)
|
|
864
|
+
return None
|
|
865
|
+
|
|
866
|
+
    def _filter_tools_by_capability_callback(
        self, callback_context: CallbackContext, llm_request: LlmRequest
    ) -> Optional[LlmResponse]:
        """
        ADK before_model_callback to filter tools in the LlmRequest based on user configuration.
        This callback modifies `llm_request.config.tools` in place by potentially
        removing individual FunctionDeclarations from genai.Tool objects or removing
        entire genai.Tool objects if all their declarations are filtered out.

        Args:
            callback_context: ADK callback context; session state supplies the
                per-user configuration under "a2a_context" / "a2a_user_config".
            llm_request: The outgoing LLM request whose tool list is filtered.

        Returns:
            Always None, so the model call proceeds with the (possibly reduced)
            tool list.
        """
        log_id_prefix = f"{self.log_identifier}[ToolCapabilityFilter]"
        log.debug("%s Running _filter_tools_by_capability_callback...", log_id_prefix)

        # Defensive extraction of the per-user config; a malformed state entry
        # degrades to an empty configuration rather than failing the request.
        a2a_context = callback_context.state.get("a2a_context", {})
        if not isinstance(a2a_context, dict):
            log.warning(
                "%s 'a2a_context' in session state is not a dictionary. Using empty configuration.",
                log_id_prefix,
            )
            a2a_context = {}
        user_config = a2a_context.get("a2a_user_config", {})
        if not isinstance(user_config, dict):
            log.warning(
                "%s 'a2a_user_config' in a2a_context is not a dictionary. Using empty configuration.",
                log_id_prefix,
            )
            user_config = {}

        # Keys starting with "_" are treated as internal and kept out of the log.
        log.debug(
            "%s User configuration for filtering: %s",
            log_id_prefix,
            {k: v for k, v in user_config.items() if not k.startswith("_")},
        )

        config_resolver = MiddlewareRegistry.get_config_resolver()

        if not llm_request.config or not llm_request.config.tools:
            log.debug("%s No tools in request to filter.", log_id_prefix)
            return None

        explicit_tools_config = self.get_config("tools", [])
        final_filtered_genai_tools: List[adk_types.Tool] = []
        # Counters used only for the summary log at the end.
        original_genai_tools_count = len(llm_request.config.tools)
        original_function_declarations_count = 0

        for original_tool in llm_request.config.tools:
            # Tools with no declarations pass through untouched.
            if not original_tool.function_declarations:
                log.warning(
                    "%s genai.Tool object has no function declarations. Keeping it.",
                    log_id_prefix,
                )
                final_filtered_genai_tools.append(original_tool)
                continue

            original_function_declarations_count += len(
                original_tool.function_declarations
            )
            permitted_declarations_for_this_tool: List[
                adk_types.FunctionDeclaration
            ] = []

            for func_decl in original_tool.function_declarations:
                func_decl_name = func_decl.name
                tool_source_for_log = "unknown"
                tool_matched_for_capability_lookup = False

                # Descriptor handed to the middleware capability check below.
                feature_descriptor = {
                    "feature_type": "tool_function",
                    "function_name": func_decl_name,
                    "tool_source": self._determine_tool_source(func_decl_name),
                    "tool_metadata": self._get_tool_metadata(func_decl_name),
                }

                # Classification pass 1: peer-delegation tools (by name prefix).
                if func_decl_name.startswith(PEER_TOOL_PREFIX):
                    peer_name = func_decl_name.replace(PEER_TOOL_PREFIX, "", 1)
                    feature_descriptor["tool_metadata"]["peer_agent_name"] = peer_name
                    tool_source_for_log = f"PeerAgentTool ({peer_name})"
                    tool_matched_for_capability_lookup = True

                # Classification pass 2: built-in tools from the registry.
                if not tool_matched_for_capability_lookup:
                    tool_def = tool_registry.get_tool_by_name(func_decl_name)
                    if tool_def:
                        feature_descriptor["tool_metadata"][
                            "tool_category"
                        ] = tool_def.category
                        feature_descriptor["tool_metadata"]["builtin_tool"] = True
                        tool_source_for_log = (
                            f"Built-in Tool ({tool_def.category}/{func_decl_name})"
                        )
                        tool_matched_for_capability_lookup = True

                # Classification pass 3: tools from the agent's explicit
                # "tools" configuration (python / builtin / mcp entries).
                if not tool_matched_for_capability_lookup:
                    for tool_cfg in explicit_tools_config:
                        cfg_tool_type = tool_cfg.get("tool_type")
                        cfg_tool_name = tool_cfg.get("tool_name")
                        cfg_func_name = tool_cfg.get("function_name")
                        if (
                            cfg_tool_type == "python"
                            and cfg_func_name == func_decl_name
                        ) or (
                            cfg_tool_type in ["builtin", "mcp"]
                            and cfg_tool_name == func_decl_name
                        ):
                            feature_descriptor["tool_metadata"][
                                "tool_type"
                            ] = cfg_tool_type
                            feature_descriptor["tool_metadata"][
                                "tool_config"
                            ] = tool_cfg
                            tool_source_for_log = f"Explicitly configured tool ({cfg_tool_type}: {cfg_tool_name or cfg_func_name})"
                            tool_matched_for_capability_lookup = True
                            break

                # Unclassified declarations are still capability-checked below;
                # the descriptor simply carries less metadata.
                if not tool_matched_for_capability_lookup:
                    log.debug(
                        "%s FunctionDeclaration '%s' not found in any known configuration for capability checking. Assuming feature is available.",
                        log_id_prefix,
                        func_decl_name,
                    )
                    tool_source_for_log = "Unmatched/Implicit FunctionDeclaration"

                context = {
                    "agent_context": self.get_agent_context(),
                    "filter_phase": "pre_llm",
                    "tool_configurations": {
                        "explicit_tools": explicit_tools_config,
                    },
                }

                # The middleware decides, per user, whether this declaration
                # remains visible to the LLM.
                if config_resolver.is_feature_enabled(
                    user_config, feature_descriptor, context
                ):
                    permitted_declarations_for_this_tool.append(func_decl)
                    log.debug(
                        "%s FunctionDeclaration '%s' (Source: %s) permitted.",
                        log_id_prefix,
                        func_decl_name,
                        tool_source_for_log,
                    )
                else:
                    log.info(
                        "%s FunctionDeclaration '%s' (Source: %s) FILTERED OUT due to configuration restrictions.",
                        log_id_prefix,
                        func_decl_name,
                        tool_source_for_log,
                    )

            if permitted_declarations_for_this_tool:
                # Deep-copy the Tool so the original request object is not
                # mutated, then narrow it to the permitted declarations.
                scoped_tool = original_tool.model_copy(deep=True)
                scoped_tool.function_declarations = permitted_declarations_for_this_tool

                final_filtered_genai_tools.append(scoped_tool)
                log.debug(
                    "%s Keeping genai.Tool (original name/type preserved, declarations filtered) as it has %d permitted FunctionDeclaration(s).",
                    log_id_prefix,
                    len(permitted_declarations_for_this_tool),
                )
            else:
                log.info(
                    "%s Entire genai.Tool (original declarations: %s) FILTERED OUT as all its FunctionDeclarations were denied by configuration.",
                    log_id_prefix,
                    [fd.name for fd in original_tool.function_declarations],
                )

        final_function_declarations_count = sum(
            len(t.function_declarations)
            for t in final_filtered_genai_tools
            if t.function_declarations
        )

        # Only replace the request's tool list when filtering actually removed
        # something; an empty result is normalized to None.
        if final_function_declarations_count != original_function_declarations_count:
            log.info(
                "%s Tool list modified by capability filter. Original genai.Tools: %d (Total Declarations: %d). Filtered genai.Tools: %d (Total Declarations: %d).",
                log_id_prefix,
                original_genai_tools_count,
                original_function_declarations_count,
                len(final_filtered_genai_tools),
                final_function_declarations_count,
            )
            llm_request.config.tools = (
                final_filtered_genai_tools if final_filtered_genai_tools else None
            )
        else:
            log.debug(
                "%s Tool list and FunctionDeclarations unchanged after capability filtering.",
                log_id_prefix,
            )

        return None
|
1055
|
+
def _determine_tool_source(self, function_name: str) -> str:
|
|
1056
|
+
"""Determine the source/type of a tool function."""
|
|
1057
|
+
if function_name.startswith("peer_"):
|
|
1058
|
+
return "peer_agent"
|
|
1059
|
+
|
|
1060
|
+
tool_def = tool_registry.get_tool_by_name(function_name)
|
|
1061
|
+
if tool_def:
|
|
1062
|
+
category_map = {
|
|
1063
|
+
"artifact_management": "builtin_artifact",
|
|
1064
|
+
"data_analysis": "builtin_data",
|
|
1065
|
+
}
|
|
1066
|
+
return category_map.get(tool_def.category, "builtin_other")
|
|
1067
|
+
|
|
1068
|
+
return "explicit_tool"
|
|
1069
|
+
|
|
1070
|
+
def _get_tool_metadata(self, function_name: str) -> Dict[str, Any]:
|
|
1071
|
+
"""Get metadata for a tool function."""
|
|
1072
|
+
metadata = {"function_name": function_name}
|
|
1073
|
+
|
|
1074
|
+
if function_name.startswith("peer_"):
|
|
1075
|
+
peer_name = function_name.replace("peer_", "", 1)
|
|
1076
|
+
metadata.update(
|
|
1077
|
+
{"peer_agent_name": peer_name, "operation_type": "delegation"}
|
|
1078
|
+
)
|
|
1079
|
+
return metadata
|
|
1080
|
+
|
|
1081
|
+
tool_def = tool_registry.get_tool_by_name(function_name)
|
|
1082
|
+
if tool_def:
|
|
1083
|
+
metadata.update(
|
|
1084
|
+
{
|
|
1085
|
+
"tool_category": tool_def.category,
|
|
1086
|
+
"required_scopes": tool_def.required_scopes,
|
|
1087
|
+
"builtin_tool": True,
|
|
1088
|
+
}
|
|
1089
|
+
)
|
|
1090
|
+
return metadata
|
|
1091
|
+
|
|
1092
|
+
explicit_tools_config = self.get_config("tools", [])
|
|
1093
|
+
for tool_cfg in explicit_tools_config:
|
|
1094
|
+
cfg_tool_name = tool_cfg.get("tool_name")
|
|
1095
|
+
cfg_func_name = tool_cfg.get("function_name")
|
|
1096
|
+
if (
|
|
1097
|
+
tool_cfg.get("tool_type") == "python" and cfg_func_name == function_name
|
|
1098
|
+
) or (
|
|
1099
|
+
tool_cfg.get("tool_type") in ["builtin", "mcp"]
|
|
1100
|
+
and cfg_tool_name == function_name
|
|
1101
|
+
):
|
|
1102
|
+
metadata.update(
|
|
1103
|
+
{"tool_type": tool_cfg.get("tool_type"), "tool_config": tool_cfg}
|
|
1104
|
+
)
|
|
1105
|
+
break
|
|
1106
|
+
|
|
1107
|
+
return metadata
|
|
1108
|
+
|
|
1109
|
+
def get_agent_context(self) -> Dict[str, Any]:
|
|
1110
|
+
"""Get agent context for middleware calls."""
|
|
1111
|
+
return {
|
|
1112
|
+
"agent_name": getattr(self, "agent_name", "unknown"),
|
|
1113
|
+
"component_type": "sac_agent",
|
|
1114
|
+
}
|
|
1115
|
+
|
|
1116
|
+
    def _inject_gateway_instructions_callback(
        self, callback_context: CallbackContext, llm_request: LlmRequest
    ) -> Optional[LlmResponse]:
        """
        ADK before_model_callback to dynamically prepend gateway-defined system_purpose
        and response_format to the agent's llm_request.config.system_instruction.

        Each of the three blocks (system purpose, response format, user profile)
        is injected only when both the corresponding config flag is enabled and
        the gateway actually supplied a non-empty value.

        Returns:
            Always None, so the model call proceeds normally.
        """
        log_id_prefix = f"{self.log_identifier}[GatewayInstrInject]"
        log.debug(
            "%s Running _inject_gateway_instructions_callback to modify system_instruction...",
            log_id_prefix,
        )

        a2a_context = callback_context.state.get("a2a_context", {})
        if not isinstance(a2a_context, dict):
            log.warning(
                "%s 'a2a_context' in session state is not a dictionary. Skipping instruction injection.",
                log_id_prefix,
            )
            return None

        # Values the gateway attached to the task context.
        system_purpose = a2a_context.get("system_purpose")
        response_format = a2a_context.get("response_format")
        user_profile = a2a_context.get("a2a_user_config", {}).get("user_profile")

        # Per-agent opt-in flags; all default to off.
        inject_purpose = self.get_config("inject_system_purpose", False)
        inject_format = self.get_config("inject_response_format", False)
        inject_user_profile = self.get_config("inject_user_profile", False)

        gateway_instructions_to_add = []

        # Inject only non-empty, non-whitespace string values.
        if (
            inject_purpose
            and system_purpose
            and isinstance(system_purpose, str)
            and system_purpose.strip()
        ):
            gateway_instructions_to_add.append(
                f"System Purpose:\n{system_purpose.strip()}"
            )
            log.debug(
                "%s Prepared system_purpose for system_instruction.", log_id_prefix
            )

        if (
            inject_format
            and response_format
            and isinstance(response_format, str)
            and response_format.strip()
        ):
            gateway_instructions_to_add.append(
                f"Desired Response Format:\n{response_format.strip()}"
            )
            log.debug(
                "%s Prepared response_format for system_instruction.", log_id_prefix
            )

        # user_profile may be a string or a dict; dicts are serialized to
        # pretty-printed JSON (default=str covers non-JSON-native values).
        if (
            inject_user_profile
            and user_profile
            and (isinstance(user_profile, str) or isinstance(user_profile, dict))
        ):
            if isinstance(user_profile, dict):
                user_profile = json.dumps(user_profile, indent=2, default=str)
            gateway_instructions_to_add.append(
                f"Inquiring User Profile:\n{user_profile.strip()}\n"
            )
            log.debug("%s Prepared user_profile for system_instruction.", log_id_prefix)

        if not gateway_instructions_to_add:
            log.debug(
                "%s No gateway instructions to inject into system_instruction.",
                log_id_prefix,
            )
            return None

        if llm_request.config is None:
            log.warning(
                "%s llm_request.config is None, cannot append gateway instructions to system_instruction.",
                log_id_prefix,
            )
            return None

        # Normalize None to "" so the append/assign branch below is well-defined.
        if llm_request.config.system_instruction is None:
            llm_request.config.system_instruction = ""

        combined_new_instructions = "\n\n".join(gateway_instructions_to_add)

        # Append after a separator when an instruction already exists,
        # otherwise the gateway blocks become the whole instruction.
        if llm_request.config.system_instruction:
            llm_request.config.system_instruction += (
                f"\n\n---\n\n{combined_new_instructions}"
            )
        else:
            llm_request.config.system_instruction = combined_new_instructions

        log.info(
            "%s Injected %d gateway instruction block(s) into llm_request.config.system_instruction.",
            log_id_prefix,
            len(gateway_instructions_to_add),
        )

        return None
|
1219
|
+
    async def _publish_text_as_partial_a2a_status_update(
        self,
        text_content: str,
        a2a_context: Dict,
        is_stream_terminating_content: bool = False,
    ) -> None:
        """
        Constructs and publishes a TaskStatusUpdateEvent for the given text.
        The 'final' flag is determined by is_stream_terminating_content.
        This method skips buffer flushing since it's used for LLM streaming text.

        Args:
            text_content: The streaming text chunk to publish; no-op if empty.
            a2a_context: Per-task context dict; "logical_task_id" is read here.
            is_stream_terminating_content: Marks the event as the final one of
                the stream when True.
        """
        logical_task_id = a2a_context.get("logical_task_id", "unknown_task")
        log_identifier_helper = (
            f"{self.log_identifier}[PublishPartialText:{logical_task_id}]"
        )

        if not text_content:
            log.debug(
                "%s No text content to publish as update (final=%s).",
                log_identifier_helper,
                is_stream_terminating_content,
            )
            return

        try:
            # Wrap the text in the A2A event envelope: message -> status -> event.
            a2a_message = A2AMessage(role="agent", parts=[TextPart(text=text_content)])
            task_status = TaskStatus(
                state=TaskState.WORKING,
                message=a2a_message,
                timestamp=datetime.now(timezone.utc),
            )
            event_metadata = {"agent_name": self.agent_name}
            status_update_event = TaskStatusUpdateEvent(
                id=logical_task_id,
                status=task_status,
                final=is_stream_terminating_content,
                metadata=event_metadata,
            )

            # skip_buffer_flush=True: this text IS the buffer's content, so
            # flushing again here would be redundant.
            await self._publish_status_update_with_buffer_flush(
                status_update_event,
                a2a_context,
                skip_buffer_flush=True,
            )

            log.debug(
                "%s Published LLM streaming text (length: %d bytes, final: %s).",
                log_identifier_helper,
                len(text_content.encode("utf-8")),
                is_stream_terminating_content,
            )

        except Exception as e:
            # Best-effort publish: failures are logged, not propagated.
            log.exception(
                "%s Error in _publish_text_as_partial_a2a_status_update: %s",
                log_identifier_helper,
                e,
            )
|
1278
|
+
    async def _publish_agent_status_signal_update(
        self, status_text: str, a2a_context: Dict
    ) -> None:
        """
        Constructs and publishes a TaskStatusUpdateEvent specifically for agent_status_message signals.
        This method will flush the buffer before publishing to maintain proper message ordering.

        Args:
            status_text: Human-readable status text carried in the signal;
                no-op if empty.
            a2a_context: Per-task context dict; "logical_task_id" is read here.
        """
        logical_task_id = a2a_context.get("logical_task_id", "unknown_task")
        log_identifier_helper = (
            f"{self.log_identifier}[PublishAgentSignal:{logical_task_id}]"
        )

        if not status_text:
            log.debug(
                "%s No text content for agent status signal.", log_identifier_helper
            )
            return

        try:
            # Signals travel as a DataPart tagged with "a2a_signal_type" so
            # consumers can distinguish them from ordinary streamed text.
            signal_data_part = DataPart(
                data={
                    "a2a_signal_type": "agent_status_message",
                    "text": status_text,
                },
                metadata={"source_embed_type": "status_update"},
            )
            a2a_message = A2AMessage(role="agent", parts=[signal_data_part])
            task_status = TaskStatus(
                state=TaskState.WORKING,
                message=a2a_message,
                timestamp=datetime.now(timezone.utc),
            )
            event_metadata = {"agent_name": self.agent_name}
            # Signals are never stream-terminating, hence final=False.
            status_update_event = TaskStatusUpdateEvent(
                id=logical_task_id,
                status=task_status,
                final=False,
                metadata=event_metadata,
            )

            # skip_buffer_flush=False: pending streamed text must be published
            # first so the signal does not arrive out of order.
            await self._publish_status_update_with_buffer_flush(
                status_update_event,
                a2a_context,
                skip_buffer_flush=False,
            )

            log.debug(
                "%s Published agent_status_message signal ('%s').",
                log_identifier_helper,
                status_text,
            )

        except Exception as e:
            # Best-effort publish: failures are logged, not propagated.
            log.exception(
                "%s Error in _publish_agent_status_signal_update: %s",
                log_identifier_helper,
                e,
            )
|
1337
|
+
async def _flush_buffer_if_needed(
|
|
1338
|
+
self, a2a_context: Dict, reason: str = "status_update"
|
|
1339
|
+
) -> bool:
|
|
1340
|
+
"""
|
|
1341
|
+
Flushes streaming buffer if it contains content.
|
|
1342
|
+
|
|
1343
|
+
Args:
|
|
1344
|
+
a2a_context: The A2A context dictionary for the current task
|
|
1345
|
+
reason: The reason for flushing (for logging purposes)
|
|
1346
|
+
|
|
1347
|
+
Returns:
|
|
1348
|
+
bool: True if buffer was flushed, False if no content to flush
|
|
1349
|
+
"""
|
|
1350
|
+
logical_task_id = a2a_context.get("logical_task_id", "unknown_task")
|
|
1351
|
+
log_identifier = f"{self.log_identifier}[BufferFlush:{logical_task_id}]"
|
|
1352
|
+
|
|
1353
|
+
with self.active_tasks_lock:
|
|
1354
|
+
task_context = self.active_tasks.get(logical_task_id)
|
|
1355
|
+
|
|
1356
|
+
if not task_context:
|
|
1357
|
+
log.warning(
|
|
1358
|
+
"%s TaskExecutionContext not found for task %s. Cannot flush buffer.",
|
|
1359
|
+
log_identifier,
|
|
1360
|
+
logical_task_id,
|
|
1361
|
+
)
|
|
1362
|
+
return False
|
|
1363
|
+
|
|
1364
|
+
buffer_content = task_context.get_streaming_buffer_content()
|
|
1365
|
+
if not buffer_content:
|
|
1366
|
+
log.debug(
|
|
1367
|
+
"%s No buffer content to flush (reason: %s).",
|
|
1368
|
+
log_identifier,
|
|
1369
|
+
reason,
|
|
1370
|
+
)
|
|
1371
|
+
return False
|
|
1372
|
+
|
|
1373
|
+
buffer_size = len(buffer_content.encode("utf-8"))
|
|
1374
|
+
log.info(
|
|
1375
|
+
"%s Flushing buffer content (size: %d bytes, reason: %s).",
|
|
1376
|
+
log_identifier,
|
|
1377
|
+
buffer_size,
|
|
1378
|
+
reason,
|
|
1379
|
+
)
|
|
1380
|
+
|
|
1381
|
+
try:
|
|
1382
|
+
resolved_text, unprocessed_tail = await self._flush_and_resolve_buffer(
|
|
1383
|
+
a2a_context, is_final=False
|
|
1384
|
+
)
|
|
1385
|
+
|
|
1386
|
+
if resolved_text:
|
|
1387
|
+
await self._publish_text_as_partial_a2a_status_update(
|
|
1388
|
+
resolved_text,
|
|
1389
|
+
a2a_context,
|
|
1390
|
+
is_stream_terminating_content=False,
|
|
1391
|
+
)
|
|
1392
|
+
log.debug(
|
|
1393
|
+
"%s Successfully flushed and published buffer content (resolved: %d bytes).",
|
|
1394
|
+
log_identifier,
|
|
1395
|
+
len(resolved_text.encode("utf-8")),
|
|
1396
|
+
)
|
|
1397
|
+
return True
|
|
1398
|
+
else:
|
|
1399
|
+
log.debug(
|
|
1400
|
+
"%s Buffer flush completed but no resolved text to publish.",
|
|
1401
|
+
log_identifier,
|
|
1402
|
+
)
|
|
1403
|
+
return False
|
|
1404
|
+
|
|
1405
|
+
except Exception as e:
|
|
1406
|
+
log.exception(
|
|
1407
|
+
"%s Error during buffer flush (reason: %s): %s",
|
|
1408
|
+
log_identifier,
|
|
1409
|
+
reason,
|
|
1410
|
+
e,
|
|
1411
|
+
)
|
|
1412
|
+
return False
|
|
1413
|
+
|
|
1414
|
+
    async def _publish_status_update_with_buffer_flush(
        self,
        status_update_event: TaskStatusUpdateEvent,
        a2a_context: Dict,
        skip_buffer_flush: bool = False,
    ) -> None:
        """
        Central method for publishing status updates with automatic buffer flushing.

        Args:
            status_update_event: The status update event to publish
            a2a_context: The A2A context dictionary for the current task
            skip_buffer_flush: If True, skip buffer flushing (used for LLM streaming text)

        Raises:
            Exception: Re-raises any error from serializing or publishing the
                event (flush errors are handled inside _flush_buffer_if_needed).
        """
        logical_task_id = a2a_context.get("logical_task_id", "unknown_task")
        jsonrpc_request_id = a2a_context.get("jsonrpc_request_id")
        log_identifier = f"{self.log_identifier}[StatusUpdate:{logical_task_id}]"

        # Classify the event for logging and for the flush "reason" string.
        # Metadata is checked first, then message parts (which can refine the
        # type to a signal or tool failure).
        status_type = "unknown"
        if status_update_event.metadata:
            if status_update_event.metadata.get("type") == "tool_invocation_start":
                status_type = "tool_invocation_start"
            elif "agent_name" in status_update_event.metadata:
                status_type = "agent_status"

        if (
            status_update_event.status
            and status_update_event.status.message
            and status_update_event.status.message.parts
        ):
            for part in status_update_event.status.message.parts:
                # Only DataParts carry a "data" payload worth inspecting.
                if hasattr(part, "data") and part.data:
                    if part.data.get("a2a_signal_type") == "agent_status_message":
                        status_type = "agent_status_signal"
                        break
                    elif "tool_error" in part.data:
                        status_type = "tool_failure"
                        break

        log.debug(
            "%s Publishing status update (type: %s, skip_buffer_flush: %s).",
            log_identifier,
            status_type,
            skip_buffer_flush,
        )

        # Flush pending streamed text first so this update is not reordered
        # ahead of text the LLM already produced.
        if not skip_buffer_flush:
            buffer_was_flushed = await self._flush_buffer_if_needed(
                a2a_context, reason=f"before_{status_type}_status"
            )
            if buffer_was_flushed:
                log.info(
                    "%s Buffer flushed before %s status update.",
                    log_identifier,
                    status_type,
                )

        try:
            # Wrap the event in a JSON-RPC response envelope for the wire.
            rpc_response = JSONRPCResponse(
                id=jsonrpc_request_id, result=status_update_event
            )
            payload_to_publish = rpc_response.model_dump(exclude_none=True)

            # Prefer a context-supplied topic; otherwise derive the gateway
            # status topic from namespace/gateway/task.
            target_topic = a2a_context.get("statusTopic") or get_gateway_status_topic(
                self.namespace, self.get_gateway_id(), logical_task_id
            )

            self._publish_a2a_event(payload_to_publish, target_topic, a2a_context)

            log.info(
                "%s Published %s status update to %s.",
                log_identifier,
                status_type,
                target_topic,
            )

        except Exception as e:
            # Publish failures are logged and re-raised — callers decide
            # whether the failure is fatal for the task.
            log.exception(
                "%s Error publishing %s status update: %s",
                log_identifier,
                status_type,
                e,
            )
            raise
|
1499
|
+
async def _translate_adk_part_to_a2a_filepart(
    self,
    adk_part: adk_types.Part,
    filename: str,
    a2a_context: Dict,
    version: Optional[int] = None,
) -> Optional[FilePart]:
    """
    Translates a loaded ADK Part (with inline_data) to an A2A FilePart
    based on the configured artifact_handling_mode.

    Modes:
        - "ignore": no translation; returns None.
        - "embed": base64-encodes the artifact bytes directly into the FilePart.
        - "reference": builds an ``artifact://`` URI pointing at the stored artifact.

    Args:
        adk_part: Loaded ADK part; must carry inline_data (mime_type + bytes).
        filename: Artifact filename used for naming and the reference URI.
        a2a_context: Task context providing user_id / session_id.
        version: Explicit artifact version. If None, the latest version is
            resolved via get_latest_artifact_version.

    Returns:
        A FilePart, or None when translation is skipped or fails.
    """
    if self.artifact_handling_mode == "ignore":
        log.debug(
            "%s Artifact handling mode is 'ignore'. Skipping translation for '%s'.",
            self.log_identifier,
            filename,
        )
        return None

    if not adk_part or not adk_part.inline_data:
        log.warning(
            "%s Cannot translate artifact '%s': ADK Part is missing or has no inline_data.",
            self.log_identifier,
            filename,
        )
        return None

    resolved_version = version
    if resolved_version is None:
        # Caller did not pin a version; resolve the latest so the reference
        # URI (and any logging) is deterministic.
        try:
            resolved_version = await get_latest_artifact_version(
                artifact_service=self.artifact_service,
                app_name=self.get_config("agent_name"),
                user_id=a2a_context.get("user_id"),
                session_id=a2a_context.get("session_id"),
                filename=filename,
            )
            if resolved_version is None:
                log.error(
                    "%s Could not resolve latest version for artifact '%s'.",
                    self.log_identifier,
                    filename,
                )
                return None
        except Exception as e:
            log.exception(
                "%s Failed to resolve latest version for artifact '%s': %s",
                self.log_identifier,
                filename,
                e,
            )
            return None

    mime_type = adk_part.inline_data.mime_type
    data_bytes = adk_part.inline_data.data
    file_content: Optional[FileContent] = None

    try:
        if self.artifact_handling_mode == "embed":
            encoded_bytes = base64.b64encode(data_bytes).decode("utf-8")
            file_content = FileContent(
                name=filename, mimeType=mime_type, bytes=encoded_bytes
            )
            log.debug(
                "%s Embedding artifact '%s' (size: %d bytes) for A2A message.",
                self.log_identifier,
                filename,
                len(data_bytes),
            )

        elif self.artifact_handling_mode == "reference":
            adk_app_name = self.get_config("agent_name")
            user_id = a2a_context.get("user_id")
            original_session_id = a2a_context.get("session_id")

            if not all([adk_app_name, user_id, original_session_id]):
                log.error(
                    "%s Cannot create artifact reference URI: missing context (app_name, user_id, or session_id).",
                    self.log_identifier,
                    )
                return None

            # BUG FIX: the URI previously omitted the artifact filename
            # (a placeholder had leaked into the path segment), producing a
            # URI that could not identify which artifact to load. The URI
            # scheme is artifact://<app>/<user>/<session>/<filename>?version=N.
            artifact_uri = f"artifact://{adk_app_name}/{user_id}/{original_session_id}/{filename}?version={resolved_version}"

            log.info(
                "%s Creating reference URI for artifact: %s",
                self.log_identifier,
                artifact_uri,
            )
            file_content = FileContent(
                name=filename, mimeType=mime_type, uri=artifact_uri
            )

        if file_content:
            return FilePart(file=file_content)
        else:
            # Reached when the mode is unrecognized or no branch produced content.
            log.warning(
                "%s No FileContent created for artifact '%s' despite mode '%s'.",
                self.log_identifier,
                filename,
                self.artifact_handling_mode,
            )
            return None

    except Exception as e:
        log.exception(
            "%s Error translating artifact '%s' to A2A FilePart (mode: %s): %s",
            self.log_identifier,
            filename,
            self.artifact_handling_mode,
            e,
        )
        return None
|
|
1613
|
+
|
|
1614
|
+
async def _filter_text_from_final_streaming_event(
    self, adk_event: ADKEvent, a2a_context: Dict
) -> ADKEvent:
    """
    Strip text parts from the final ADKEvent of a turn for PERSISTENT
    streaming sessions, so the aggregated text (already streamed as
    partial updates) is not sent twice. Non-text parts such as function
    calls are kept. Returns the event unchanged when filtering does not
    apply or there is no text to remove.
    """
    # Guard clauses: filtering only applies to persistent (non run-based)
    # streaming sessions, on the final (non-partial) event, with content.
    if a2a_context.get("is_run_based_session", False):
        return adk_event
    if not a2a_context.get("is_streaming", False):
        return adk_event
    if adk_event.partial:
        return adk_event
    if not (adk_event.content and adk_event.content.parts):
        return adk_event

    log_id = f"{self.log_identifier}[FilterFinalStreamEvent:{a2a_context.get('logical_task_id', 'unknown')}]"
    log.debug(
        "%s Filtering final streaming event to remove redundant text.", log_id
    )

    kept_parts = [p for p in adk_event.content.parts if p.text is None]
    if len(kept_parts) == len(adk_event.content.parts):
        # Nothing was text; no copy needed.
        return adk_event

    # Work on a deep copy so the original event remains untouched.
    filtered_event = adk_event.model_copy(deep=True)
    filtered_event.content = (
        adk_types.Content(parts=kept_parts) if kept_parts else None
    )
    log.info(
        "%s Removed text from final streaming event. Kept %d non-text part(s).",
        log_id,
        len(kept_parts),
    )
    return filtered_event
|
|
1656
|
+
|
|
1657
|
+
async def process_and_publish_adk_event(
    self, adk_event: ADKEvent, a2a_context: Dict
):
    """
    Main orchestrator for processing ADK events.
    Handles text buffering, embed resolution, and event routing based on
    whether the event is partial or the final event of a turn.

    Flow:
        1. Discard internal `_continue_generation` tool responses and events
           marked `was_interrupted`.
        2. Partial events: append text to the streaming buffer and flush when
           batching is disabled or the configured byte threshold is reached.
        3. Final events: flush any remaining buffer, strip redundant text via
           `_filter_text_from_final_streaming_event`, publish the routed A2A
           payload, then process artifact-return signals.

    Args:
        adk_event: The ADK event to process (partial or final for the turn).
        a2a_context: Per-task context (logical_task_id, session flags, topics).
    """
    logical_task_id = a2a_context.get("logical_task_id", "unknown_task")
    log_id_main = (
        f"{self.log_identifier}[ProcessADKEvent:{logical_task_id}:{adk_event.id}]"
    )
    log.debug(
        "%s Received ADKEvent (Partial: %s, Final Turn: %s).",
        log_id_main,
        adk_event.partial,
        not adk_event.partial,
    )

    if adk_event.content and adk_event.content.parts:
        # Internal continuation plumbing must never reach clients.
        if any(
            p.function_response
            and p.function_response.name == "_continue_generation"
            for p in adk_event.content.parts
        ):
            log.debug(
                "%s Discarding _continue_generation tool response event.",
                log_id_main,
            )
            return

    if adk_event.custom_metadata and adk_event.custom_metadata.get(
        "was_interrupted"
    ):
        # Events produced after an interruption are dropped entirely.
        log.debug(
            "%s Found 'was_interrupted' signal. Skipping event.",
            log_id_main,
        )
        return

    # Only hold the lock for the lookup; buffer operations below are done
    # on the retrieved context outside the lock.
    with self.active_tasks_lock:
        task_context = self.active_tasks.get(logical_task_id)

    if not task_context:
        log.error(
            "%s TaskExecutionContext not found for task %s. Cannot process ADK event.",
            log_id_main,
            logical_task_id,
        )
        return

    is_run_based_session = a2a_context.get("is_run_based_session", False)
    is_final_turn_event = not adk_event.partial

    if not is_final_turn_event:
        # Partial event: accumulate streamed text into the task's buffer.
        if adk_event.content and adk_event.content.parts:
            for part in adk_event.content.parts:
                if part.text is not None:
                    task_context.append_to_streaming_buffer(part.text)
                    log.debug(
                        "%s Appended text to buffer. New buffer size: %d bytes",
                        log_id_main,
                        len(
                            task_context.get_streaming_buffer_content().encode(
                                "utf-8"
                            )
                        ),
                    )

        buffer_content = task_context.get_streaming_buffer_content()
        # A non-positive threshold means "no batching": flush on every event.
        batching_disabled = self.stream_batching_threshold_bytes <= 0
        buffer_has_content = bool(buffer_content)
        threshold_met = (
            buffer_has_content
            and not batching_disabled
            and (
                len(buffer_content.encode("utf-8"))
                >= self.stream_batching_threshold_bytes
            )
        )

        if buffer_has_content and (batching_disabled or threshold_met):
            log.info(
                "%s Partial event triggered buffer flush due to size/batching config.",
                log_id_main,
            )
            resolved_text, _ = await self._flush_and_resolve_buffer(
                a2a_context, is_final=False
            )

            if resolved_text:
                if is_run_based_session:
                    # Run-based tasks aggregate text for the final response
                    # instead of streaming partial status updates.
                    task_context.append_to_run_based_buffer(resolved_text)
                    log.debug(
                        "%s [RUN_BASED] Appended %d bytes to run_based_response_buffer.",
                        log_id_main,
                        len(resolved_text.encode("utf-8")),
                    )
                else:
                    await self._publish_text_as_partial_a2a_status_update(
                        resolved_text, a2a_context
                    )
    else:
        # Final event of the turn: drain whatever is left in the buffer first
        # so text ordering is preserved relative to the final publish below.
        buffer_content = task_context.get_streaming_buffer_content()
        if buffer_content:
            log.info(
                "%s Final event triggered flush of remaining buffer content.",
                log_id_main,
            )
            resolved_text, _ = await self._flush_and_resolve_buffer(
                a2a_context, is_final=True
            )
            if resolved_text:
                if is_run_based_session:
                    task_context.append_to_run_based_buffer(resolved_text)
                    log.debug(
                        "%s [RUN_BASED] Appended final %d bytes to run_based_response_buffer.",
                        log_id_main,
                        len(resolved_text.encode("utf-8")),
                    )
                else:
                    await self._publish_text_as_partial_a2a_status_update(
                        resolved_text, a2a_context
                    )

        # Prepare and publish the final event for observability
        event_to_publish = await self._filter_text_from_final_streaming_event(
            adk_event, a2a_context
        )

        (
            a2a_payload,
            target_topic,
            user_properties,
            _,
        ) = await format_and_route_adk_event(event_to_publish, a2a_context, self)

        if a2a_payload and target_topic:
            self._publish_a2a_event(a2a_payload, target_topic, a2a_context)
            log.info(
                "%s Published final turn event (e.g., tool call) to %s.",
                log_id_main,
                target_topic,
            )
        else:
            log.debug(
                "%s Final turn event did not result in a publishable A2A message.",
                log_id_main,
            )

        # Artifact-return signals are processed after the final publish so
        # artifact updates follow the turn's content on the wire.
        await self._handle_artifact_return_signals(adk_event, a2a_context)
|
|
1808
|
+
|
|
1809
|
+
async def _flush_and_resolve_buffer(
    self, a2a_context: Dict, is_final: bool
) -> Tuple[str, str]:
    """Flushes buffer, resolves embeds, handles signals, returns (resolved_text, unprocessed_tail).

    Drains the task's streaming buffer, runs embed resolution and signal
    extraction over the drained text, and handles any SIGNAL_STATUS_UPDATE
    signals by publishing agent status updates.

    Args:
        a2a_context: Per-task context carrying logical_task_id.
        is_final: True when this is the last flush of the turn. When False,
            any unprocessed tail (e.g. a possibly-incomplete embed directive)
            is pushed back into the buffer for the next flush; when True the
            tail is appended to the resolved text instead.

    Returns:
        (resolved_text, unprocessed_tail); both empty strings when the
        TaskExecutionContext cannot be found.
    """
    logical_task_id = a2a_context.get("logical_task_id", "unknown_task")
    log_id = f"{self.log_identifier}[FlushBuffer:{logical_task_id}]"

    # Lock only around the lookup; the buffer operations themselves are
    # performed on the retrieved context.
    with self.active_tasks_lock:
        task_context = self.active_tasks.get(logical_task_id)

    if not task_context:
        log.error(
            "%s TaskExecutionContext not found for task %s. Cannot flush/resolve buffer.",
            log_id,
            logical_task_id,
        )
        return "", ""

    text_to_process = task_context.flush_streaming_buffer()

    resolved_text, signals_found, unprocessed_tail = (
        await self._resolve_early_embeds_and_handle_signals(
            text_to_process, a2a_context
        )
    )

    if not is_final:
        # Mid-turn flush: keep the unresolved tail buffered so a directive
        # split across chunks can complete on a later flush.
        if unprocessed_tail:
            task_context.append_to_streaming_buffer(unprocessed_tail)
            log.debug(
                "%s Placed unprocessed tail (length %d) back into buffer.",
                log_id,
                len(unprocessed_tail.encode("utf-8")),
            )
    else:
        # Final flush: nothing more is coming, so emit the tail as-is.
        resolved_text = resolved_text + unprocessed_tail

    if signals_found:
        log.info(
            "%s Handling %d signals from buffer resolution.",
            log_id,
            len(signals_found),
        )
        for _signal_index, signal_data_tuple in signals_found:
            # Only (None, "SIGNAL_STATUS_UPDATE", text) tuples are handled
            # here; other signal shapes are ignored by this method.
            if (
                isinstance(signal_data_tuple, tuple)
                and len(signal_data_tuple) == 3
                and signal_data_tuple[0] is None
                and signal_data_tuple[1] == "SIGNAL_STATUS_UPDATE"
            ):
                status_text = signal_data_tuple[2]
                log.info(
                    "%s Publishing SIGNAL_STATUS_UPDATE from buffer: '%s'",
                    log_id,
                    status_text,
                )
                await self._publish_agent_status_signal_update(
                    status_text, a2a_context
                )

    return resolved_text, unprocessed_tail
|
|
1870
|
+
|
|
1871
|
+
async def _handle_artifact_return_signals(
    self, adk_event: ADKEvent, a2a_context: Dict
):
    """
    Processes artifact return signals.
    This method is triggered by a placeholder in state_delta, but reads the
    actual list of signals from the TaskExecutionContext.

    For each queued signal, loads the named artifact version from the
    artifact service, translates it to an A2A FilePart, and publishes a
    TaskArtifactUpdateEvent to the status topic. Failures on individual
    signals are logged and skipped so one bad artifact does not block the
    rest.
    """
    logical_task_id = a2a_context.get("logical_task_id", "unknown_task")
    log_id = f"{self.log_identifier}[ArtifactSignals:{logical_task_id}]"

    # Check for the trigger in state_delta. The presence of any key is enough.
    has_signal_trigger = (
        adk_event.actions
        and adk_event.actions.state_delta
        and any(
            k.startswith("temp:a2a_return_artifact:")
            for k in adk_event.actions.state_delta
        )
    )

    if not has_signal_trigger:
        return

    # Lock only for the lookup; the signal list is drained afterwards.
    with self.active_tasks_lock:
        task_context = self.active_tasks.get(logical_task_id)

    if not task_context:
        log.warning(
            "%s No TaskExecutionContext found for task %s. Cannot process artifact signals.",
            log_id,
            logical_task_id,
        )
        return

    # Atomically take and clear the queued signals so they are sent once.
    all_signals = task_context.get_and_clear_artifact_signals()

    if not all_signals:
        log.info(
            "%s Triggered for artifact signals, but none were found in the execution context.",
            log_id,
        )
        return

    log.info(
        "%s Found %d artifact return signal(s) in the execution context.",
        log_id,
        len(all_signals),
    )

    original_session_id = a2a_context.get("session_id")
    user_id = a2a_context.get("user_id")
    adk_app_name = self.get_config("agent_name")

    peer_status_topic = a2a_context.get("statusTopic")
    namespace = self.get_config("namespace")
    gateway_id = self.get_gateway_id()

    # Prefer the peer-provided status topic; fall back to the gateway topic.
    artifact_topic = peer_status_topic or get_gateway_status_topic(
        namespace, gateway_id, logical_task_id
    )

    if not self.artifact_service:
        log.error("%s Artifact service not available.", log_id)
        return
    if not artifact_topic:
        log.error("%s Could not determine artifact topic.", log_id)
        return

    for item in all_signals:
        try:
            # Each signal pins a specific (filename, version) pair.
            filename = item["filename"]
            version = item["version"]

            log.info(
                "%s Processing artifact return signal for '%s' v%d from context.",
                log_id,
                filename,
                version,
            )

            loaded_adk_part = await self.artifact_service.load_artifact(
                app_name=adk_app_name,
                user_id=user_id,
                session_id=original_session_id,
                filename=filename,
                version=version,
            )

            if not loaded_adk_part:
                log.warning(
                    "%s Failed to load artifact '%s' v%d.",
                    log_id,
                    filename,
                    version,
                )
                continue

            a2a_file_part = await self._translate_adk_part_to_a2a_filepart(
                loaded_adk_part, filename, a2a_context, version=version
            )

            if a2a_file_part:
                a2a_artifact = A2AArtifact(name=filename, parts=[a2a_file_part])
                artifact_update_event = TaskArtifactUpdateEvent(
                    id=logical_task_id, artifact=a2a_artifact
                )
                artifact_payload = JSONRPCResponse(
                    id=a2a_context.get("jsonrpc_request_id"),
                    result=artifact_update_event,
                ).model_dump(exclude_none=True)

                self._publish_a2a_event(
                    artifact_payload, artifact_topic, a2a_context
                )

                log.info(
                    "%s Published TaskArtifactUpdateEvent for '%s' to %s",
                    log_id,
                    filename,
                    artifact_topic,
                )
            else:
                log.warning(
                    "%s Failed to translate artifact '%s' v%d to A2A FilePart.",
                    log_id,
                    filename,
                    version,
                )

        except Exception as e:
            # Best-effort per-signal: log and continue with remaining signals.
            log.exception(
                "%s Error processing artifact signal item %s from context: %s",
                log_id,
                item,
                e,
            )
|
|
2008
|
+
|
|
2009
|
+
def _format_final_task_status(self, last_event: ADKEvent) -> TaskStatus:
    """
    Build the final TaskStatus from the last ADK event of a task.

    Text parts become TextParts; dict-valued tool responses become
    DataParts tagged with the tool name (non-dict responses fall back to
    text). If the event instead carries a requested-auth action, the state
    becomes INPUT_REQUIRED. Always returns a status with at least one
    (possibly empty) part.
    """
    log.debug(
        "%s Formatting final task status from last ADK event %s",
        self.log_identifier,
        last_event.id,
    )

    def _part_for_tool_response(func_response):
        # Convert a single function response; never let a bad payload
        # abort status formatting — fall back to an "omitted" marker.
        try:
            payload = func_response.response
            if isinstance(payload, dict):
                return DataPart(
                    data=payload,
                    metadata={"tool_name": func_response.name},
                )
            return TextPart(
                text=f"Tool {func_response.name} result: {str(payload)}"
            )
        except Exception:
            return TextPart(
                text=f"[Tool {func_response.name} result omitted]"
            )

    state = TaskState.COMPLETED
    parts = []

    if last_event.content and last_event.content.parts:
        for item in last_event.content.parts:
            if item.text:
                parts.append(TextPart(text=item.text))
            elif item.function_response:
                parts.append(_part_for_tool_response(item.function_response))
    elif last_event.actions and last_event.actions.requested_auth_configs:
        state = TaskState.INPUT_REQUIRED
        parts.append(TextPart(text="[Agent requires input/authentication]"))

    if not parts:
        # Guarantee a non-empty parts list for downstream consumers.
        parts.append(TextPart(text=""))

    return TaskStatus(state=state, message=A2AMessage(role="agent", parts=parts))
|
|
2056
|
+
|
|
2057
|
+
async def finalize_task_success(self, a2a_context: Dict):
    """
    Finalizes a task successfully. Fetches final state, publishes final A2A response,
    and ACKs the original message.
    For RUN_BASED tasks, it uses the aggregated response buffer.
    For STREAMING tasks, it uses the content of the last ADK event.

    On any failure during finalization the original message is NACKed (if
    available) and an InternalError JSON-RPC response is published to the
    reply topic as a best effort.

    Args:
        a2a_context: Per-task context (ids, topics, session flags, and the
            original Solace message to ACK/NACK).
    """
    logical_task_id = a2a_context.get("logical_task_id")
    original_message: Optional[SolaceMessage] = a2a_context.get(
        "original_solace_message"
    )
    log.info(
        "%s Finalizing task %s successfully.", self.log_identifier, logical_task_id
    )
    try:
        # "effective_session_id" may differ from the original session id
        # (e.g. run-based sessions); prefer it for session retrieval.
        session_id_to_retrieve = a2a_context.get(
            "effective_session_id", a2a_context.get("session_id")
        )
        original_session_id = a2a_context.get("session_id")
        user_id = a2a_context.get("user_id")
        client_id = a2a_context.get("client_id")
        jsonrpc_request_id = a2a_context.get("jsonrpc_request_id")
        peer_reply_topic = a2a_context.get("replyToTopic")
        namespace = self.get_config("namespace")
        agent_name = self.get_config("agent_name")
        is_run_based_session = a2a_context.get("is_run_based_session", False)

        final_status: TaskStatus

        with self.active_tasks_lock:
            task_context = self.active_tasks.get(logical_task_id)

        final_adk_session = await self.session_service.get_session(
            app_name=agent_name,
            user_id=user_id,
            session_id=session_id_to_retrieve,
        )
        if not final_adk_session:
            raise RuntimeError(
                f"Could not retrieve final session state for {session_id_to_retrieve}"
            )

        last_event = (
            final_adk_session.events[-1] if final_adk_session.events else None
        )

        if is_run_based_session:
            # Run-based: the response text was aggregated during streaming
            # into the run_based_response_buffer; use it as the final text.
            aggregated_text = ""
            if task_context:
                aggregated_text = task_context.run_based_response_buffer
                log.info(
                    "%s Using aggregated response buffer for RUN_BASED task %s (length: %d bytes).",
                    self.log_identifier,
                    logical_task_id,
                    len(aggregated_text.encode("utf-8")),
                )

            final_a2a_parts = []
            if aggregated_text:
                final_a2a_parts.append(TextPart(text=aggregated_text))

            # Carry over any non-text parts (tool responses) from the last
            # ADK event; text parts are skipped to avoid duplication.
            if last_event and last_event.content and last_event.content.parts:
                for part in last_event.content.parts:
                    if part.text is None:
                        if part.function_response:
                            try:
                                response_data = part.function_response.response
                                if isinstance(response_data, dict):
                                    final_a2a_parts.append(
                                        DataPart(
                                            data=response_data,
                                            metadata={
                                                "tool_name": part.function_response.name
                                            },
                                        )
                                    )
                                else:
                                    final_a2a_parts.append(
                                        TextPart(
                                            text=f"Tool {part.function_response.name} result: {str(response_data)}"
                                        )
                                    )
                            except Exception:
                                final_a2a_parts.append(
                                    TextPart(
                                        text=f"[Tool {part.function_response.name} result omitted]"
                                    )
                                )

            if not final_a2a_parts:
                # Guarantee at least one (possibly empty) part.
                final_a2a_parts.append(TextPart(text=""))

            final_status = TaskStatus(
                state=TaskState.COMPLETED,
                message=A2AMessage(role="agent", parts=final_a2a_parts),
            )
        else:
            # Streaming: derive the status from the last ADK event directly.
            if last_event:
                final_status = self._format_final_task_status(last_event)
            else:
                final_status = TaskStatus(
                    state=TaskState.COMPLETED,
                    message=A2AMessage(
                        role="agent", parts=[TextPart(text="Task completed.")]
                    ),
                )

        # Artifacts are delivered via TaskArtifactUpdateEvent during the
        # task, so the final Task intentionally carries none.
        final_a2a_artifacts: List[A2AArtifact] = []
        log.debug(
            "%s Final artifact bundling is removed. Artifacts sent via TaskArtifactUpdateEvent.",
            self.log_identifier,
        )

        final_task_metadata = {"agent_name": agent_name}
        if task_context and task_context.produced_artifacts:
            # Attach a manifest of produced artifacts instead of the bytes.
            final_task_metadata["produced_artifacts"] = (
                task_context.produced_artifacts
            )
            log.info(
                "%s Attaching manifest of %d produced artifacts to final task metadata.",
                self.log_identifier,
                len(task_context.produced_artifacts),
            )

        final_task = Task(
            id=logical_task_id,
            sessionId=original_session_id,
            status=final_status,
            artifacts=(final_a2a_artifacts if final_a2a_artifacts else None),
            metadata=final_task_metadata,
        )
        final_response = JSONRPCResponse(id=jsonrpc_request_id, result=final_task)
        a2a_payload = final_response.model_dump(exclude_none=True)
        target_topic = peer_reply_topic or get_client_response_topic(
            namespace, client_id
        )

        self._publish_a2a_event(a2a_payload, target_topic, a2a_context)
        log.info(
            "%s Published final successful response for task %s to %s (Artifacts NOT bundled).",
            self.log_identifier,
            logical_task_id,
            target_topic,
        )
        # ACK only after the response was published successfully.
        if original_message:
            try:
                original_message.call_acknowledgements()
                log.info(
                    "%s Called ACK for original message of task %s.",
                    self.log_identifier,
                    logical_task_id,
                )
            except Exception as ack_e:
                log.error(
                    "%s Failed to call ACK for task %s: %s",
                    self.log_identifier,
                    logical_task_id,
                    ack_e,
                )
        else:
            log.warning(
                "%s Original Solace message not found in context for task %s. Cannot ACK.",
                self.log_identifier,
                logical_task_id,
            )

    except Exception as e:
        log.exception(
            "%s Error during successful finalization of task %s: %s",
            self.log_identifier,
            logical_task_id,
            e,
        )
        # Finalization failed: NACK so the broker can redeliver/handle.
        if original_message:
            try:
                original_message.call_negative_acknowledgements()
                log.warning(
                    "%s Called NACK for original message of task %s due to finalization error.",
                    self.log_identifier,
                    logical_task_id,
                )
            except Exception as nack_e:
                log.error(
                    "%s Failed to call NACK for task %s after finalization error: %s",
                    self.log_identifier,
                    logical_task_id,
                    nack_e,
                )
        else:
            log.warning(
                "%s Original Solace message not found in context for task %s during finalization error. Cannot NACK.",
                self.log_identifier,
                logical_task_id,
            )

        # Best-effort: tell the client the finalization failed.
        try:
            jsonrpc_request_id = a2a_context.get("jsonrpc_request_id")
            client_id = a2a_context.get("client_id")
            peer_reply_topic = a2a_context.get("replyToTopic")
            namespace = self.get_config("namespace")
            error_response = JSONRPCResponse(
                id=jsonrpc_request_id,
                error=InternalError(
                    message=f"Failed to finalize successful task: {e}",
                    data={"taskId": logical_task_id},
                ),
            )
            target_topic = peer_reply_topic or get_client_response_topic(
                namespace, client_id
            )
            self._publish_a2a_message(
                error_response.model_dump(exclude_none=True), target_topic
            )
        except Exception as report_err:
            log.error(
                "%s Failed to report finalization error for task %s: %s",
                self.log_identifier,
                logical_task_id,
                report_err,
            )
|
|
2277
|
+
|
|
2278
|
+
def finalize_task_canceled(self, a2a_context: Dict):
    """
    Finalizes a task as CANCELED. Publishes A2A Task response with CANCELED state
    and ACKs the original message if available.
    Called by the background ADK thread wrapper when a task is cancelled.

    On failure during finalization the original message is NACKed as a
    best effort (NACK errors are intentionally swallowed).

    Args:
        a2a_context: Per-task context (ids, reply topic, and the original
            Solace message to ACK/NACK).
    """
    logical_task_id = a2a_context.get("logical_task_id")
    original_message: Optional[SolaceMessage] = a2a_context.get(
        "original_solace_message"
    )
    log.info(
        "%s Finalizing task %s as CANCELED.", self.log_identifier, logical_task_id
    )
    try:
        jsonrpc_request_id = a2a_context.get("jsonrpc_request_id")
        client_id = a2a_context.get("client_id")
        peer_reply_topic = a2a_context.get("replyToTopic")
        namespace = self.get_config("namespace")

        canceled_status = TaskStatus(
            state=TaskState.CANCELED,
            message=A2AMessage(
                role="agent",
                parts=[TextPart(text="Task cancelled by request.")],
            ),
        )
        agent_name = self.get_config("agent_name")
        final_task = Task(
            id=logical_task_id,
            sessionId=a2a_context.get("session_id"),
            status=canceled_status,
            metadata={"agent_name": agent_name},
        )
        final_response = JSONRPCResponse(id=jsonrpc_request_id, result=final_task)
        a2a_payload = final_response.model_dump(exclude_none=True)
        # Peer reply topic takes precedence over the client response topic.
        target_topic = peer_reply_topic or get_client_response_topic(
            namespace, client_id
        )

        self._publish_a2a_event(a2a_payload, target_topic, a2a_context)
        log.info(
            "%s Published final CANCELED response for task %s to %s.",
            self.log_identifier,
            logical_task_id,
            target_topic,
        )

        # ACK only after the CANCELED response was published.
        if original_message:
            try:
                original_message.call_acknowledgements()
                log.info(
                    "%s Called ACK for original message of cancelled task %s.",
                    self.log_identifier,
                    logical_task_id,
                )
            except Exception as ack_e:
                log.error(
                    "%s Failed to call ACK for cancelled task %s: %s",
                    self.log_identifier,
                    logical_task_id,
                    ack_e,
                )
        else:
            log.warning(
                "%s Original Solace message not found in context for cancelled task %s. Cannot ACK.",
                self.log_identifier,
                logical_task_id,
            )

    except Exception as e:
        log.exception(
            "%s Error during CANCELED finalization of task %s: %s",
            self.log_identifier,
            logical_task_id,
            e,
        )
        if original_message:
            try:
                original_message.call_negative_acknowledgements()
            except Exception:
                # Best-effort NACK; nothing further can be done here.
                pass
|
|
2359
|
+
|
|
2360
|
+
async def _publish_tool_failure_status(
    self, exception: Exception, a2a_context: Dict
):
    """
    Publishes an intermediate status update indicating a tool execution has failed.
    This method will flush the buffer before publishing to maintain proper message ordering.

    Args:
        exception: The exception raised by the failed tool; its string form is
            embedded in the outgoing DataPart.
        a2a_context: Task context dict; only "logical_task_id" is read here.

    Note: never raises — any publish failure is caught and logged so callers
    (the error-finalization path) are not disrupted.
    """
    logical_task_id = a2a_context.get("logical_task_id")
    log_identifier_helper = (
        f"{self.log_identifier}[ToolFailureStatus:{logical_task_id}]"
    )
    try:
        # Create the status update event. The DataPart carries a structured
        # signal ("a2a_signal_type": "tool_execution_error") rather than free
        # text, so downstream consumers can react programmatically.
        tool_error_data_part = DataPart(
            data={
                "a2a_signal_type": "tool_execution_error",
                "error_message": str(exception),
                "details": "An unhandled exception occurred during tool execution.",
            }
        )

        status_message = A2AMessage(role="agent", parts=[tool_error_data_part])
        # State stays WORKING: this is an intermediate signal, not the final
        # task outcome (final=False below as well).
        intermediate_status = TaskStatus(
            state=TaskState.WORKING,
            message=status_message,
            timestamp=datetime.now(timezone.utc),
        )

        status_update_event = TaskStatusUpdateEvent(
            id=logical_task_id,
            status=intermediate_status,
            final=False,
            metadata={"agent_name": self.get_config("agent_name")},
        )

        # skip_buffer_flush=False forces any buffered streaming output to be
        # flushed first, preserving message ordering for clients.
        await self._publish_status_update_with_buffer_flush(
            status_update_event,
            a2a_context,
            skip_buffer_flush=False,
        )

        log.debug(
            "%s Published tool failure status update.",
            log_identifier_helper,
        )

    except Exception as e:
        # Best-effort: log and swallow so the caller's finalization continues.
        log.error(
            "%s Failed to publish intermediate tool failure status: %s",
            log_identifier_helper,
            e,
        )
|
+
async def _repair_session_history_on_error(
    self, exception: Exception, a2a_context: Dict
):
    """
    Reactively repairs the session history if the last event was a tool call.
    This is "the belt" in the belt-and-suspenders strategy.

    If the most recent session event contains dangling function_call(s) (a tool
    was invoked but never produced a function_response because it crashed),
    appends a synthetic repair event carrying an error function_response so the
    LLM history remains well-formed on the next turn.

    Args:
        exception: The tool-execution failure; its message is embedded in the
            repair content.
        a2a_context: Must provide "effective_session_id" and "user_id".

    Note: never raises — all failures are caught and logged.
    """
    log_identifier = f"{self.log_identifier}[HistoryRepair]"
    try:
        # Local import — presumably avoids a circular import at module load
        # time (TODO confirm).
        from ...agent.adk.callbacks import create_dangling_tool_call_repair_content

        session_id = a2a_context.get("effective_session_id")
        user_id = a2a_context.get("user_id")
        agent_name = self.get_config("agent_name")

        # Guard: repair is impossible without a resolvable session identity
        # and a session service.
        if not all([session_id, user_id, agent_name, self.session_service]):
            log.warning(
                "%s Skipping history repair due to missing context.", log_identifier
            )
            return

        session = await self.session_service.get_session(
            app_name=agent_name, user_id=user_id, session_id=session_id
        )

        if not session or not session.events:
            log.debug(
                "%s No session or events found for history repair.", log_identifier
            )
            return

        # Only the most recent event matters: a dangling call can only be the
        # last thing appended before the failure.
        last_event = session.events[-1]
        function_calls = last_event.get_function_calls()

        if not function_calls:
            log.debug(
                "%s Last event was not a function call. No repair needed.",
                log_identifier,
            )
            return

        log.info(
            "%s Last event contained function_call(s). Repairing session history.",
            log_identifier,
        )

        repair_content = create_dangling_tool_call_repair_content(
            dangling_calls=function_calls,
            error_message=f"Tool execution failed with an unhandled exception: {str(exception)}",
        )

        # Reuse the failing invocation's id so the repair event is attributed
        # to the same invocation as the dangling call.
        repair_event = ADKEvent(
            invocation_id=last_event.invocation_id,
            author=agent_name,
            content=repair_content,
        )

        await self.session_service.append_event(session=session, event=repair_event)
        log.info(
            "%s Session history repaired successfully with an error function_response.",
            log_identifier,
        )

    except Exception as e:
        log.exception(
            "%s Critical error during session history repair: %s", log_identifier, e
        )
2481
|
+
def finalize_task_limit_reached(
    self, a2a_context: Dict, exception: LlmCallsLimitExceededError
):
    """
    Finalizes a task when the LLM call limit is reached, prompting the user to continue.
    Publishes a JSON-RPC error response that guides the user to type 'continue',
    then ACKs the original Solace message (the limit is an expected condition,
    so the message must not be redelivered).

    Args:
        a2a_context: Task context dict (ids, reply topics, original message).
        exception: The limit error that triggered this path; its occurrence
            alone drives finalization — it is not inspected further.
    """
    logical_task_id = a2a_context.get("logical_task_id")
    original_message: Optional[SolaceMessage] = a2a_context.get(
        "original_solace_message"
    )
    log.info(
        "%s Finalizing task %s as COMPLETED (LLM call limit reached).",
        self.log_identifier,
        logical_task_id,
    )
    try:
        jsonrpc_request_id = a2a_context.get("jsonrpc_request_id")
        client_id = a2a_context.get("client_id")
        peer_reply_topic = a2a_context.get("replyToTopic")
        namespace = self.get_config("namespace")

        limit_message_text = (
            "This interaction has reached its processing limit. "
            "If you'd like to continue this conversation, please type 'continue'. "
            "Otherwise, you can start a new topic."
        )

        error_payload = InternalError(
            message=limit_message_text,
            data={"taskId": logical_task_id, "reason": "llm_call_limit_reached"},
        )

        final_response = JSONRPCResponse(id=jsonrpc_request_id, error=error_payload)
        a2a_payload = final_response.model_dump(exclude_none=True)

        # Peer-delegated tasks reply on the delegator's topic; otherwise the
        # response goes straight back to the originating client.
        target_topic = peer_reply_topic or get_client_response_topic(
            namespace, client_id
        )

        self._publish_a2a_event(a2a_payload, target_topic, a2a_context)
        log.info(
            "%s Published ERROR response for task %s to %s (LLM limit reached, user guided to continue).",
            self.log_identifier,
            logical_task_id,
            target_topic,
        )

        if original_message:
            try:
                original_message.call_acknowledgements()
                log.info(
                    "%s Called ACK for original message of task %s (LLM limit reached).",
                    self.log_identifier,
                    logical_task_id,
                )
            except Exception as ack_e:
                log.error(
                    "%s Failed to call ACK for task %s (LLM limit reached): %s",
                    self.log_identifier,
                    logical_task_id,
                    ack_e,
                )
        else:
            log.warning(
                "%s Original Solace message not found in context for task %s (LLM limit reached). Cannot ACK.",
                self.log_identifier,
                logical_task_id,
            )

    except Exception as e:
        log.exception(
            "%s Error during COMPLETED (LLM limit) finalization of task %s: %s",
            self.log_identifier,
            logical_task_id,
            e,
        )
        # BUGFIX: finalize_task_error is a coroutine; the previous bare call
        # from this synchronous method created a never-awaited coroutine, so
        # the error fallback silently did nothing. Schedule it on the
        # component's dedicated async loop instead (same pattern as the
        # rest of this component's thread-to-loop handoffs).
        if self._async_loop and self._async_loop.is_running():
            asyncio.run_coroutine_threadsafe(
                self.finalize_task_error(e, a2a_context), self._async_loop
            )
        else:
            log.error(
                "%s Async loop unavailable; cannot run error finalization for task %s.",
                self.log_identifier,
                logical_task_id,
            )
+
async def finalize_task_error(self, exception: Exception, a2a_context: Dict):
    """
    Finalizes a task with an error. Publishes a final A2A Task with a FAILED
    status and NACKs the original message.
    Called by the background ADK thread wrapper.

    Sequence: (1) repair dangling tool calls in session history, (2) publish
    an intermediate tool-failure status, (3) publish the final FAILED Task,
    (4) NACK the original Solace message so the broker can redeliver it.

    Args:
        exception: The failure that terminated the task (logged; its details
            are NOT exposed to the client — a generic message is sent instead).
        a2a_context: Task context dict (ids, reply topics, original message).
    """
    logical_task_id = a2a_context.get("logical_task_id")
    original_message: Optional[SolaceMessage] = a2a_context.get(
        "original_solace_message"
    )
    log.error(
        "%s Finalizing task %s with error: %s",
        self.log_identifier,
        logical_task_id,
        exception,
    )
    try:
        # "Belt": repair session history if the last event was a dangling
        # function call left by the failed tool.
        await self._repair_session_history_on_error(exception, a2a_context)

        # Intermediate signal so streaming clients see the failure promptly.
        await self._publish_tool_failure_status(exception, a2a_context)

        client_id = a2a_context.get("client_id")
        jsonrpc_request_id = a2a_context.get("jsonrpc_request_id")
        peer_reply_topic = a2a_context.get("replyToTopic")
        namespace = self.get_config("namespace")

        # Deliberately generic user-facing text; raw exception details stay
        # in the logs only.
        failed_status = TaskStatus(
            state=TaskState.FAILED,
            message=A2AMessage(
                role="agent",
                parts=[
                    TextPart(
                        text="An unexpected error occurred during tool execution. Please try your request again. If the problem persists, contact an administrator."
                    )
                ],
            ),
        )

        final_task = Task(
            id=logical_task_id,
            sessionId=a2a_context.get("session_id"),
            status=failed_status,
            metadata={"agent_name": self.get_config("agent_name")},
        )

        final_response = JSONRPCResponse(id=jsonrpc_request_id, result=final_task)
        a2a_payload = final_response.model_dump(exclude_none=True)
        # Peer-delegated tasks reply on the delegator's topic; otherwise go
        # straight back to the originating client.
        target_topic = peer_reply_topic or get_client_response_topic(
            namespace, client_id
        )

        self._publish_a2a_event(a2a_payload, target_topic, a2a_context)
        log.info(
            "%s Published final FAILED Task response for task %s to %s",
            self.log_identifier,
            logical_task_id,
            target_topic,
        )

        # NACK (not ACK): the task failed, so let the broker's redelivery
        # policy decide what happens to the original message.
        if original_message:
            try:
                original_message.call_negative_acknowledgements()
                log.info(
                    "%s Called NACK for original message of failed task %s.",
                    self.log_identifier,
                    logical_task_id,
                )
            except Exception as nack_e:
                log.error(
                    "%s Failed to call NACK for failed task %s: %s",
                    self.log_identifier,
                    logical_task_id,
                    nack_e,
                )
        else:
            log.warning(
                "%s Original Solace message not found in context for failed task %s. Cannot NACK.",
                self.log_identifier,
                logical_task_id,
            )

    except Exception as e:
        # Fallback path: even if finalization itself blew up, still attempt
        # the NACK so the message is not left unacknowledged.
        log.exception(
            "%s Error during error finalization of task %s: %s",
            self.log_identifier,
            logical_task_id,
            e,
        )
        if original_message:
            try:
                original_message.call_negative_acknowledgements()
                log.warning(
                    "%s Called NACK for task %s during error finalization fallback.",
                    self.log_identifier,
                    logical_task_id,
                )
            except Exception as nack_e:
                log.error(
                    "%s Failed to call NACK for task %s during error finalization fallback: %s",
                    self.log_identifier,
                    logical_task_id,
                    nack_e,
                )
        else:
            log.warning(
                "%s Original Solace message not found for task %s during error finalization fallback. Cannot NACK.",
                self.log_identifier,
                logical_task_id,
            )
|
+
async def finalize_task_with_cleanup(
    self, a2a_context: Dict, is_paused: bool, exception: Optional[Exception] = None
):
    """
    Centralized async method to finalize a task and perform all necessary cleanup.
    This is scheduled on the component's event loop to ensure it runs after
    any pending status updates.

    Dispatch: TaskCancelledError -> finalize_task_canceled (sync);
    LlmCallsLimitExceededError -> finalize_task_limit_reached (sync);
    any other exception -> finalize_task_error (awaited);
    no exception -> finalize_task_success (awaited).
    The finally-block cleanup (RUN_BASED session deletion, active-task
    removal) runs regardless of finalization outcome, but is skipped
    entirely when the task is paused.

    Args:
        a2a_context: The context dictionary for the task.
        is_paused: Boolean indicating if the task is paused for a long-running tool.
        exception: The exception that occurred, if any.
    """
    logical_task_id = a2a_context.get("logical_task_id", "unknown_task")
    log_id = f"{self.log_identifier}[FinalizeTask:{logical_task_id}]"
    log.info(
        "%s Starting finalization and cleanup. Paused: %s, Exception: %s",
        log_id,
        is_paused,
        type(exception).__name__ if exception else "None",
    )

    try:
        if is_paused:
            # A paused task will resume later; do not emit any final response.
            log.info(
                "%s Task is paused for a long-running tool. Skipping finalization logic.",
                log_id,
            )
        else:
            try:
                if exception:
                    if isinstance(exception, TaskCancelledError):
                        self.finalize_task_canceled(a2a_context)
                    elif isinstance(exception, LlmCallsLimitExceededError):
                        self.finalize_task_limit_reached(a2a_context, exception)
                    else:
                        await self.finalize_task_error(exception, a2a_context)
                else:
                    await self.finalize_task_success(a2a_context)
            except Exception as e:
                # Finalization itself failed; last resort is to NACK the
                # original message so it is not lost unacknowledged.
                log.exception(
                    "%s An unexpected error occurred during the finalization logic itself: %s",
                    log_id,
                    e,
                )
                original_message: Optional[SolaceMessage] = a2a_context.get(
                    "original_solace_message"
                )
                if original_message:
                    try:
                        original_message.call_negative_acknowledgements()
                    except Exception as nack_err:
                        log.error(
                            "%s Fallback NACK failed during finalization error: %s",
                            log_id,
                            nack_err,
                        )
    finally:
        if not is_paused:
            # Cleanup for RUN_BASED sessions remains, as it's a service-level concern
            if a2a_context.get("is_run_based_session"):
                temp_session_id_to_delete = a2a_context.get(
                    "temporary_run_session_id_for_cleanup"
                )
                agent_name_for_session = a2a_context.get("agent_name_for_session")
                user_id_for_session = a2a_context.get("user_id_for_session")

                if (
                    temp_session_id_to_delete
                    and agent_name_for_session
                    and user_id_for_session
                ):
                    log.info(
                        "%s Cleaning up RUN_BASED session (app: %s, user: %s, id: %s) from shared service for task_id='%s'",
                        log_id,
                        agent_name_for_session,
                        user_id_for_session,
                        temp_session_id_to_delete,
                        logical_task_id,
                    )
                    try:
                        if self.session_service:
                            await self.session_service.delete_session(
                                app_name=agent_name_for_session,
                                user_id=user_id_for_session,
                                session_id=temp_session_id_to_delete,
                            )
                        else:
                            log.error(
                                "%s self.session_service is None, cannot delete RUN_BASED session %s.",
                                log_id,
                                temp_session_id_to_delete,
                            )
                    except AttributeError:
                        # Some session service implementations may not expose
                        # delete_session; degrade to a logged skip.
                        log.error(
                            "%s self.session_service does not support 'delete_session'. Cleanup for RUN_BASED session (app: %s, user: %s, id: %s) skipped.",
                            log_id,
                            agent_name_for_session,
                            user_id_for_session,
                            temp_session_id_to_delete,
                        )
                    except Exception as e_cleanup:
                        log.error(
                            "%s Error cleaning up RUN_BASED session (app: %s, user: %s, id: %s) from shared service: %s",
                            log_id,
                            agent_name_for_session,
                            user_id_for_session,
                            temp_session_id_to_delete,
                            e_cleanup,
                            exc_info=True,
                        )
                else:
                    log.warning(
                        "%s Could not clean up RUN_BASED session for task %s due to missing context (id_to_delete: %s, agent_name: %s, user_id: %s).",
                        log_id,
                        logical_task_id,
                        temp_session_id_to_delete,
                        agent_name_for_session,
                        user_id_for_session,
                    )

            # Drop the in-memory execution context under the lock; pop() is
            # idempotent, so a concurrent removal only produces a warning.
            with self.active_tasks_lock:
                removed_task_context = self.active_tasks.pop(logical_task_id, None)
                if removed_task_context:
                    log.debug(
                        "%s Removed TaskExecutionContext for task %s.",
                        log_id,
                        logical_task_id,
                    )
                else:
                    log.warning(
                        "%s TaskExecutionContext for task %s was already removed.",
                        log_id,
                        logical_task_id,
                    )
        else:
            log.info(
                "%s Task %s is paused for a long-running tool. Skipping all cleanup.",
                log_id,
                logical_task_id,
            )

        log.info(
            "%s Finalization and cleanup complete for task %s.",
            log_id,
            logical_task_id,
        )
|
+
def _resolve_instruction_provider(
    self, config_value: Any
) -> Union[str, InstructionProvider]:
    """Resolve an instruction config value via the shared helper.

    Returns either a plain instruction string or a callable provider,
    per the helper's contract.
    """
    resolved = resolve_instruction_provider(self, config_value)
    return resolved
|
2826
|
+
def _get_a2a_base_topic(self) -> str:
    """Return the namespace-scoped base topic prefix for A2A messaging."""
    topic = get_a2a_base_topic(self.namespace)
    return topic
|
2830
|
+
def _get_discovery_topic(self) -> str:
    """Return the agent-discovery topic for this namespace."""
    topic = get_discovery_topic(self.namespace)
    return topic
|
2834
|
+
def _get_agent_request_topic(self, agent_id: str) -> str:
    """Return the request topic for the given agent, scoped to this namespace."""
    topic = get_agent_request_topic(self.namespace, agent_id)
    return topic
|
2838
|
+
def _get_agent_response_topic(
    self, delegating_agent_name: str, sub_task_id: str
) -> str:
    """Return the topic on which a peer's final response for a sub-task arrives."""
    topic = get_agent_response_topic(
        self.namespace, delegating_agent_name, sub_task_id
    )
    return topic
|
2846
|
+
def _get_peer_agent_status_topic(
    self, delegating_agent_name: str, sub_task_id: str
) -> str:
    """Return the topic on which a peer publishes status updates for a sub-task."""
    topic = get_peer_agent_status_topic(
        self.namespace, delegating_agent_name, sub_task_id
    )
    return topic
|
2854
|
+
def _get_client_response_topic(self, client_id: str) -> str:
    """Return the topic for delivering responses to the given client."""
    topic = get_client_response_topic(self.namespace, client_id)
    return topic
|
2858
|
+
def _publish_a2a_message(
    self, payload: Dict, topic: str, user_properties: Optional[Dict] = None
):
    """Helper to publish A2A messages via the SAC App.

    Logs the outgoing message to the invocation monitor (when present)
    before sending. Missing app context is logged but NOT raised; any
    other publish failure is logged and re-raised to the caller.

    Args:
        payload: JSON-serializable message body.
        topic: Destination topic string.
        user_properties: Optional broker user properties to attach.
    """
    try:
        app = self.get_app()
        if app:
            # Record the publish for invocation tracing before the actual send.
            if self.invocation_monitor:
                self.invocation_monitor.log_message_event(
                    direction="PUBLISHED",
                    topic=topic,
                    payload=payload,
                    component_identifier=self.log_identifier,
                )
            app.send_message(
                payload=payload, topic=topic, user_properties=user_properties
            )
        else:
            # No SAC App context: deliberately a log-only failure (no raise).
            log.error(
                "%s Cannot publish message: Not running within a SAC App context.",
                self.log_identifier,
            )
    except Exception as e:
        log.exception(
            "%s Failed to publish A2A message to topic %s: %s",
            self.log_identifier,
            topic,
            e,
        )
        # Re-raise: callers need to know the publish did not happen.
        raise
|
2889
|
+
def _publish_a2a_event(self, payload: Dict, topic: str, a2a_context: Dict):
    """
    Publish an A2A event, consistently propagating the caller's user
    configuration (if present in a2a_context) into the message's user
    properties before delegating to _publish_a2a_message.
    """
    props = {}
    user_config = a2a_context.get("a2a_user_config")
    if user_config:
        props["a2aUserConfig"] = user_config
    self._publish_a2a_message(payload, topic, props)
|
2900
|
+
def submit_a2a_task(
    self,
    target_agent_name: str,
    a2a_message: A2AMessage,
    original_session_id: str,
    main_logical_task_id: str,
    user_id: str,
    user_config: Dict[str, Any],
    sub_task_id: str,
    function_call_id: Optional[str] = None,
) -> str:
    """
    Submits a task to a peer agent in a non-blocking way.
    Returns the sub_task_id for correlation.
    """
    helper_log_id = f"{self.log_identifier}[SubmitA2ATask:{target_agent_name}]"
    log.debug(
        "%s Submitting non-blocking task for main task %s",
        helper_log_id,
        main_logical_task_id,
    )

    # Build the delegation request. The metadata marks the peer session as
    # RUN_BASED (ephemeral) and links the sub-task back to the parent task.
    request_metadata = {
        "sessionBehavior": "RUN_BASED",
        "parentTaskId": main_logical_task_id,
        "function_call_id": function_call_id,
    }
    a2a_request = SendTaskRequest(
        params={
            "id": sub_task_id,
            "sessionId": original_session_id,
            "message": a2a_message.model_dump(exclude_none=True),
            "metadata": request_metadata,
        }
    )

    # Topics the peer will use: final response and intermediate status
    # updates, both correlated by sub_task_id.
    delegating_agent_name = self.get_config("agent_name")
    reply_to_topic = self._get_agent_response_topic(
        delegating_agent_name=delegating_agent_name,
        sub_task_id=sub_task_id,
    )
    status_topic = self._get_peer_agent_status_topic(
        delegating_agent_name=delegating_agent_name,
        sub_task_id=sub_task_id,
    )
    peer_request_topic = self._get_agent_request_topic(target_agent_name)

    user_properties = {
        "replyTo": reply_to_topic,
        "a2aStatusTopic": status_topic,
        "userId": user_id,
    }
    if isinstance(user_config, dict):
        user_properties["a2aUserConfig"] = user_config

    self._publish_a2a_message(
        payload=a2a_request.model_dump(exclude_none=True),
        topic=peer_request_topic,
        user_properties=user_properties,
    )
    log.info(
        "%s Published delegation request to %s (Sub-Task ID: %s, ReplyTo: %s, StatusTo: %s)",
        helper_log_id,
        peer_request_topic,
        sub_task_id,
        reply_to_topic,
        status_topic,
    )

    return sub_task_id
|
2972
|
+
def _handle_scheduled_task_completion(
    self, future: concurrent.futures.Future, event_type_for_log: EventType
):
    """Callback to handle completion of futures from run_coroutine_threadsafe.

    Logs cancellation and failure outcomes; a normal completion is silent.
    Never raises — its own errors are caught and logged.
    """
    try:
        if future.cancelled():
            log.warning(
                "%s Coroutine for event type %s (scheduled via run_coroutine_threadsafe) was cancelled.",
                self.log_identifier,
                event_type_for_log,
            )
            return
        if not future.done():
            return
        exception = future.exception()
        if exception is None:
            return
        log.error(
            "%s Coroutine for event type %s (scheduled via run_coroutine_threadsafe) failed with exception: %s",
            self.log_identifier,
            event_type_for_log,
            exception,
            exc_info=exception,
        )
    except Exception as e:
        log.error(
            "%s Error during _handle_scheduled_task_completion (for run_coroutine_threadsafe future) for event type %s: %s",
            self.log_identifier,
            event_type_for_log,
            e,
            exc_info=e,
        )
|
3003
|
+
def _start_async_loop(self):
    """Target method for the dedicated async thread.

    Binds the pre-created event loop to this thread and runs it until
    stopped. If the loop dies with an exception before initialization
    completed, the failure is propagated to the waiting main thread via
    self._async_init_future.
    """
    log.info("%s Dedicated async thread started.", self.log_identifier)
    try:
        asyncio.set_event_loop(self._async_loop)
        # Blocks this thread until loop.stop() is requested (e.g. by cleanup).
        self._async_loop.run_forever()
    except Exception as e:
        log.exception(
            "%s Exception in dedicated async thread loop: %s",
            self.log_identifier,
            e,
        )
        # NOTE(review): set_exception is called directly here (the loop is no
        # longer running), unlike the call_soon_threadsafe path used elsewhere
        # — presumably intentional since we're in the loop's own thread.
        if self._async_init_future and not self._async_init_future.done():
            self._async_init_future.set_exception(e)
    finally:
        log.info("%s Dedicated async thread loop finishing.", self.log_identifier)
        # Defensive: ensure the loop is actually stopping before thread exit.
        if self._async_loop.is_running():
            self._async_loop.call_soon_threadsafe(self._async_loop.stop)
|
3022
|
+
async def _perform_async_init(self):
    """Coroutine executed on the dedicated loop to perform async initialization.

    Steps: load ADK tools, construct the ADK agent and runner, then build the
    agent-card tool manifest (one {id, name, description} entry per tool).
    Success or failure is signaled to the waiting main thread through
    self._async_init_future.
    """
    try:
        log.info(
            "%s Loading tools asynchronously in dedicated thread...",
            self.log_identifier,
        )
        loaded_tools, enabled_builtin_tools = await load_adk_tools(self)
        log.info(
            "%s Initializing ADK Agent/Runner asynchronously in dedicated thread...",
            self.log_identifier,
        )
        self.adk_agent = initialize_adk_agent(
            self, loaded_tools, enabled_builtin_tools
        )
        self.runner = initialize_adk_runner(self)

        log.info("%s Populating agent card tool manifest...", self.log_identifier)
        tool_manifest = []
        for tool in loaded_tools:
            if isinstance(tool, MCPToolset):
                # MCP toolsets expand to multiple remote tools; fetching them
                # can fail, in which case this toolset is skipped entirely.
                try:
                    log.debug(
                        "%s Retrieving tools from MCPToolset for Agent %s...",
                        self.log_identifier,
                        self.agent_name,
                    )
                    mcp_tools = await tool.get_tools()
                except Exception as e:
                    log.error(
                        "%s Error retrieving tools from MCPToolset for Agent Card %s: %s",
                        self.log_identifier,
                        self.agent_name,
                        e,
                    )
                    continue
                for mcp_tool in mcp_tools:
                    tool_manifest.append(
                        {
                            "id": mcp_tool.name,
                            "name": mcp_tool.name,
                            "description": mcp_tool.description
                            or "No description available.",
                        }
                    )
            else:
                # Non-MCP tools may be objects (with .name/.description) or
                # plain functions (with __name__/__doc__); handle both.
                tool_name = getattr(tool, "name", getattr(tool, "__name__", None))
                if tool_name is not None:
                    tool_manifest.append(
                        {
                            "id": tool_name,
                            "name": tool_name,
                            "description": getattr(
                                tool, "description", getattr(tool, "__doc__", None)
                            )
                            or "No description available.",
                        }
                    )

        self.agent_card_tool_manifest = tool_manifest
        log.info(
            "%s Agent card tool manifest populated with %d tools.",
            self.log_identifier,
            len(self.agent_card_tool_manifest),
        )

        log.info(
            "%s Async initialization steps complete in dedicated thread.",
            self.log_identifier,
        )
        # Signal success via the loop so the future is resolved thread-safely.
        if self._async_init_future and not self._async_init_future.done():
            log.info(
                "%s _perform_async_init: Signaling success to main thread.",
                self.log_identifier,
            )
            self._async_loop.call_soon_threadsafe(
                self._async_init_future.set_result, True
            )
        else:
            log.warning(
                "%s _perform_async_init: _async_init_future is None or already done before signaling success.",
                self.log_identifier,
            )
    except Exception as e:
        log.exception(
            "%s _perform_async_init: Error during async initialization in dedicated thread: %s",
            self.log_identifier,
            e,
        )
        # Signal failure so the main thread's wait unblocks with the error.
        if self._async_init_future and not self._async_init_future.done():
            log.error(
                "%s _perform_async_init: Signaling failure to main thread.",
                self.log_identifier,
            )
            self._async_loop.call_soon_threadsafe(
                self._async_init_future.set_exception, e
            )
        else:
            log.warning(
                "%s _perform_async_init: _async_init_future is None or already done before signaling failure.",
                self.log_identifier,
            )
|
3125
|
+
def cleanup(self):
|
|
3126
|
+
"""Clean up resources on component shutdown."""
|
|
3127
|
+
log.info("%s Cleaning up A2A ADK Host Component.", self.log_identifier)
|
|
3128
|
+
self.cancel_timer(self._card_publish_timer_id)
|
|
3129
|
+
|
|
3130
|
+
cleanup_func_details = self.get_config("agent_cleanup_function")
|
|
3131
|
+
if cleanup_func_details and isinstance(cleanup_func_details, dict):
|
|
3132
|
+
module_name = cleanup_func_details.get("module")
|
|
3133
|
+
func_name = cleanup_func_details.get("name")
|
|
3134
|
+
base_path = cleanup_func_details.get("base_path")
|
|
3135
|
+
|
|
3136
|
+
if module_name and func_name:
|
|
3137
|
+
log.info(
|
|
3138
|
+
"%s Attempting to load and execute cleanup_function: %s.%s",
|
|
3139
|
+
self.log_identifier,
|
|
3140
|
+
module_name,
|
|
3141
|
+
func_name,
|
|
3142
|
+
)
|
|
3143
|
+
try:
|
|
3144
|
+
module = import_module(module_name, base_path=base_path)
|
|
3145
|
+
cleanup_function = getattr(module, func_name)
|
|
3146
|
+
|
|
3147
|
+
if not callable(cleanup_function):
|
|
3148
|
+
log.error(
|
|
3149
|
+
"%s Cleanup function '%s' in module '%s' is not callable. Skipping.",
|
|
3150
|
+
self.log_identifier,
|
|
3151
|
+
func_name,
|
|
3152
|
+
module_name,
|
|
3153
|
+
)
|
|
3154
|
+
else:
|
|
3155
|
+
cleanup_function(self)
|
|
3156
|
+
log.info(
|
|
3157
|
+
"%s Successfully executed cleanup_function: %s.%s",
|
|
3158
|
+
self.log_identifier,
|
|
3159
|
+
module_name,
|
|
3160
|
+
func_name,
|
|
3161
|
+
)
|
|
3162
|
+
except Exception as e:
|
|
3163
|
+
log.exception(
|
|
3164
|
+
"%s Error during agent cleanup via cleanup_function '%s.%s': %s",
|
|
3165
|
+
self.log_identifier,
|
|
3166
|
+
module_name,
|
|
3167
|
+
func_name,
|
|
3168
|
+
e,
|
|
3169
|
+
)
|
|
3170
|
+
if self.invocation_monitor:
|
|
3171
|
+
try:
|
|
3172
|
+
self.invocation_monitor.cleanup()
|
|
3173
|
+
except Exception as im_clean_e:
|
|
3174
|
+
log.error(
|
|
3175
|
+
"%s Error during InvocationMonitor cleanup: %s",
|
|
3176
|
+
self.log_identifier,
|
|
3177
|
+
im_clean_e,
|
|
3178
|
+
)
|
|
3179
|
+
|
|
3180
|
+
if self._async_loop and self._async_loop.is_running():
|
|
3181
|
+
log.info(
|
|
3182
|
+
"%s Performing async cleanup via dedicated thread...",
|
|
3183
|
+
self.log_identifier,
|
|
3184
|
+
)
|
|
3185
|
+
|
|
3186
|
+
async def _perform_async_cleanup():
|
|
3187
|
+
log.debug("%s Entering async cleanup coroutine...", self.log_identifier)
|
|
3188
|
+
pass
|
|
3189
|
+
|
|
3190
|
+
try:
|
|
3191
|
+
cleanup_future = asyncio.run_coroutine_threadsafe(
|
|
3192
|
+
_perform_async_cleanup(), self._async_loop
|
|
3193
|
+
)
|
|
3194
|
+
cleanup_future.result(timeout=30)
|
|
3195
|
+
log.info("%s Async cleanup completed.", self.log_identifier)
|
|
3196
|
+
except Exception as e:
|
|
3197
|
+
log.exception(
|
|
3198
|
+
"%s Error during async cleanup: %s", self.log_identifier, e
|
|
3199
|
+
)
|
|
3200
|
+
finally:
|
|
3201
|
+
if self._async_loop and self._async_loop.is_running():
|
|
3202
|
+
log.info(
|
|
3203
|
+
"%s Cleanup: Stopping dedicated async loop...",
|
|
3204
|
+
self.log_identifier,
|
|
3205
|
+
)
|
|
3206
|
+
self._async_loop.call_soon_threadsafe(self._async_loop.stop)
|
|
3207
|
+
else:
|
|
3208
|
+
log.info(
|
|
3209
|
+
"%s Cleanup: Dedicated async loop is None or not running, no need to stop.",
|
|
3210
|
+
self.log_identifier,
|
|
3211
|
+
)
|
|
3212
|
+
if self._async_thread and self._async_thread.is_alive():
|
|
3213
|
+
log.info(
|
|
3214
|
+
"%s Cleanup: Joining dedicated async thread...",
|
|
3215
|
+
self.log_identifier,
|
|
3216
|
+
)
|
|
3217
|
+
self._async_thread.join(timeout=5)
|
|
3218
|
+
if self._async_thread.is_alive():
|
|
3219
|
+
log.warning(
|
|
3220
|
+
"%s Dedicated async thread did not exit cleanly.",
|
|
3221
|
+
self.log_identifier,
|
|
3222
|
+
)
|
|
3223
|
+
log.info(
|
|
3224
|
+
"%s Dedicated async thread stopped and joined.", self.log_identifier
|
|
3225
|
+
)
|
|
3226
|
+
else:
|
|
3227
|
+
log.info(
|
|
3228
|
+
"%s Dedicated async loop not running, skipping async cleanup.",
|
|
3229
|
+
self.log_identifier,
|
|
3230
|
+
)
|
|
3231
|
+
|
|
3232
|
+
with self.active_tasks_lock:
|
|
3233
|
+
if self._async_loop and self._async_loop.is_running():
|
|
3234
|
+
for task_context in self.active_tasks.values():
|
|
3235
|
+
task_context.cancel()
|
|
3236
|
+
self.active_tasks.clear()
|
|
3237
|
+
log.debug("%s Cleared all active tasks.", self.log_identifier)
|
|
3238
|
+
|
|
3239
|
+
super().cleanup()
|
|
3240
|
+
log.info("%s Component cleanup finished.", self.log_identifier)
|
|
3241
|
+
|
|
3242
|
+
def set_agent_specific_state(self, key: str, value: Any):
    """
    Sets a key-value pair in the agent-specific state.
    Intended to be used by the custom init_function.
    """
    # EAFP: create the backing dict on first access rather than probing
    # with hasattr() before every write.
    try:
        state = self.agent_specific_state
    except AttributeError:
        state = self.agent_specific_state = {}
    state[key] = value
    log.debug("%s Set agent_specific_state['%s']", self.log_identifier, key)
|
|
3251
|
+
|
|
3252
|
+
def get_agent_specific_state(self, key: str, default: Optional[Any] = None) -> Any:
    """
    Gets a value from the agent-specific state.
    Intended to be used by tools and the custom cleanup_function.
    """
    # EAFP mirror of set_agent_specific_state: an absent backing dict just
    # means "no state stored yet", so fall back to the caller's default.
    try:
        state = self.agent_specific_state
    except AttributeError:
        return default
    return state.get(key, default)
|
|
3260
|
+
|
|
3261
|
+
def get_async_loop(self) -> Optional[asyncio.AbstractEventLoop]:
    """Return the dedicated asyncio event loop used for this component's async tasks (may be None)."""
    return self._async_loop
|
|
3264
|
+
|
|
3265
|
+
def set_agent_system_instruction_string(self, instruction_string: str) -> None:
    """
    Sets a static string to be injected into the LLM system prompt.
    Called by the agent's init_function.
    """
    if isinstance(instruction_string, str):
        # A static string and a callback are mutually exclusive: setting one
        # clears the other so only a single injection mechanism is active.
        self._agent_system_instruction_string = instruction_string
        self._agent_system_instruction_callback = None
        log.info("%s Static agent system instruction string set.", self.log_identifier)
        return
    # Invalid input is logged and ignored rather than raised, matching the
    # sibling setter's tolerant error handling.
    log.error(
        "%s Invalid type for instruction_string: %s. Must be a string.",
        self.log_identifier,
        type(instruction_string),
    )
|
|
3280
|
+
|
|
3281
|
+
def set_agent_system_instruction_callback(
    self,
    callback_function: Callable[[CallbackContext, LlmRequest], Optional[str]],
) -> None:
    """
    Sets a callback function to dynamically generate system prompt injections.
    Called by the agent's init_function.
    """
    if callable(callback_function):
        # A callback and a static string are mutually exclusive: setting one
        # clears the other so only a single injection mechanism is active.
        self._agent_system_instruction_callback = callback_function
        self._agent_system_instruction_string = None
        log.info("%s Agent system instruction callback set.", self.log_identifier)
    else:
        # Invalid input is logged and ignored rather than raised, matching
        # the sibling setter's tolerant error handling.
        log.error(
            "%s Invalid type for callback_function: %s. Must be callable.",
            self.log_identifier,
            type(callback_function),
        )
|
|
3299
|
+
|
|
3300
|
+
def get_gateway_id(self) -> str:
    """
    Returns a unique identifier for this specific gateway/host instance.

    Currently this is just the agent name; it could be made more robust
    later (e.g. hostname + agent name).
    """
    return self.agent_name
|
|
3306
|
+
|
|
3307
|
+
async def _resolve_early_embeds_and_handle_signals(
    self, raw_text: str, a2a_context: Dict
) -> Tuple[str, List[Tuple[int, Any]], str]:
    """
    Resolves early-stage embeds in raw text and extracts signals.
    Returns the resolved text, a list of signals, and any unprocessed tail.
    This is called by process_and_publish_adk_event.

    Args:
        raw_text: The text to scan for early-stage embeds.
        a2a_context: Per-task context dict; reads "logical_task_id",
            "session_id" and "user_id" keys.

    Returns:
        A 3-tuple of (resolved_text, signals_found, unprocessed_tail).
        On missing context or any resolution error, the original text is
        returned unchanged with no signals and an empty tail (best-effort
        fallback, never raises).
        NOTE(review): signals_found appears to be a list of (index, signal)
        pairs per the return annotation — confirm against
        resolve_embeds_in_string's contract.
    """
    logical_task_id = a2a_context.get("logical_task_id", "unknown_task")
    # Per-call log identifier so concurrent tasks' log lines stay distinguishable.
    method_context_log_identifier = (
        f"{self.log_identifier}[ResolveEmbeds:{logical_task_id}]"
    )
    log.debug(
        "%s Resolving early embeds for text (length: %d).",
        method_context_log_identifier,
        len(raw_text),
    )

    original_session_id = a2a_context.get("session_id")
    user_id = a2a_context.get("user_id")
    adk_app_name = self.get_config("agent_name")

    # Embed resolution needs an artifact service plus full session identity;
    # without any of them, skip resolution entirely rather than fail the task.
    if not all([self.artifact_service, original_session_id, user_id, adk_app_name]):
        log.error(
            "%s Missing necessary context for embed resolution (artifact_service, session_id, user_id, or adk_app_name). Skipping.",
            method_context_log_identifier,
        )
        return (
            raw_text,
            [],
            "",
        )
    # Context handed to the embed resolver: where to load artifacts from,
    # which session they belong to, and resolver size/depth limits.
    context_for_embeds = {
        "artifact_service": self.artifact_service,
        "session_context": {
            "app_name": adk_app_name,
            "user_id": user_id,
            "session_id": original_session_id,
        },
        "config": {
            # NOTE(review): the resolver's max-size limit is sourced from the
            # "tool_output_llm_return_max_bytes" config key — confirm this
            # cross-mapping is intentional.
            "gateway_max_artifact_resolve_size_bytes": self.get_config(
                "tool_output_llm_return_max_bytes", 4096
            ),
            "gateway_recursive_embed_depth": self.get_config(
                "gateway_recursive_embed_depth", 12
            ),
        },
    }

    resolver_config = context_for_embeds["config"]

    try:
        # Function-scope imports — presumably to avoid an import cycle with
        # the common utils package; confirm before hoisting to module level.
        from ...common.utils.embeds.resolver import (
            resolve_embeds_in_string,
            evaluate_embed,
        )
        from ...common.utils.embeds.constants import EARLY_EMBED_TYPES

        # Only EARLY_EMBED_TYPES are resolved here; anything past
        # processed_until_index is left for a later stage as the "tail".
        resolved_text, processed_until_index, signals_found = (
            await resolve_embeds_in_string(
                text=raw_text,
                context=context_for_embeds,
                resolver_func=evaluate_embed,
                types_to_resolve=EARLY_EMBED_TYPES,
                log_identifier=method_context_log_identifier,
                config=resolver_config,
            )
        )
        unprocessed_tail = raw_text[processed_until_index:]
        log.debug(
            "%s Embed resolution complete. Resolved text: '%s...', Signals found: %d, Unprocessed tail: '%s...'",
            method_context_log_identifier,
            resolved_text[:100],
            len(signals_found),
            unprocessed_tail[:100],
        )
        return resolved_text, signals_found, unprocessed_tail
    except Exception as e:
        # Best-effort: log the failure and fall back to the unresolved text
        # so the calling publish path can continue.
        log.exception(
            "%s Error during embed resolution: %s", method_context_log_identifier, e
        )
        return raw_text, [], ""
|