solace-agent-mesh 1.11.2 (solace_agent_mesh-1.11.2-py3-none-any.whl)
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions exactly as they appear in their respective public registries.
- solace_agent_mesh/__init__.py +0 -0
- solace_agent_mesh/agent/__init__.py +0 -0
- solace_agent_mesh/agent/adk/__init__.py +0 -0
- solace_agent_mesh/agent/adk/adk_llm.txt +226 -0
- solace_agent_mesh/agent/adk/adk_llm_detail.txt +566 -0
- solace_agent_mesh/agent/adk/alembic/README +74 -0
- solace_agent_mesh/agent/adk/alembic/env.py +77 -0
- solace_agent_mesh/agent/adk/alembic/script.py.mako +28 -0
- solace_agent_mesh/agent/adk/alembic/versions/e2902798564d_adk_session_db_upgrade.py +52 -0
- solace_agent_mesh/agent/adk/alembic.ini +112 -0
- solace_agent_mesh/agent/adk/app_llm_agent.py +52 -0
- solace_agent_mesh/agent/adk/artifacts/__init__.py +1 -0
- solace_agent_mesh/agent/adk/artifacts/artifacts_llm.txt +171 -0
- solace_agent_mesh/agent/adk/artifacts/filesystem_artifact_service.py +545 -0
- solace_agent_mesh/agent/adk/artifacts/s3_artifact_service.py +609 -0
- solace_agent_mesh/agent/adk/callbacks.py +2318 -0
- solace_agent_mesh/agent/adk/embed_resolving_mcp_toolset.py +406 -0
- solace_agent_mesh/agent/adk/intelligent_mcp_callbacks.py +415 -0
- solace_agent_mesh/agent/adk/mcp_content_processor.py +666 -0
- solace_agent_mesh/agent/adk/models/lite_llm.py +1026 -0
- solace_agent_mesh/agent/adk/models/models_llm.txt +189 -0
- solace_agent_mesh/agent/adk/models/oauth2_token_manager.py +132 -0
- solace_agent_mesh/agent/adk/runner.py +390 -0
- solace_agent_mesh/agent/adk/schema_migration.py +88 -0
- solace_agent_mesh/agent/adk/services.py +468 -0
- solace_agent_mesh/agent/adk/setup.py +1325 -0
- solace_agent_mesh/agent/adk/stream_parser.py +415 -0
- solace_agent_mesh/agent/adk/tool_wrapper.py +165 -0
- solace_agent_mesh/agent/agent_llm.txt +369 -0
- solace_agent_mesh/agent/agent_llm_detail.txt +1702 -0
- solace_agent_mesh/agent/protocol/__init__.py +0 -0
- solace_agent_mesh/agent/protocol/event_handlers.py +2041 -0
- solace_agent_mesh/agent/protocol/protocol_llm.txt +81 -0
- solace_agent_mesh/agent/protocol/protocol_llm_detail.txt +92 -0
- solace_agent_mesh/agent/proxies/__init__.py +0 -0
- solace_agent_mesh/agent/proxies/a2a/__init__.py +3 -0
- solace_agent_mesh/agent/proxies/a2a/a2a_llm.txt +190 -0
- solace_agent_mesh/agent/proxies/a2a/app.py +56 -0
- solace_agent_mesh/agent/proxies/a2a/component.py +1585 -0
- solace_agent_mesh/agent/proxies/a2a/config.py +216 -0
- solace_agent_mesh/agent/proxies/a2a/oauth_token_cache.py +104 -0
- solace_agent_mesh/agent/proxies/base/__init__.py +3 -0
- solace_agent_mesh/agent/proxies/base/app.py +100 -0
- solace_agent_mesh/agent/proxies/base/base_llm.txt +148 -0
- solace_agent_mesh/agent/proxies/base/component.py +816 -0
- solace_agent_mesh/agent/proxies/base/config.py +85 -0
- solace_agent_mesh/agent/proxies/base/proxy_task_context.py +19 -0
- solace_agent_mesh/agent/proxies/proxies_llm.txt +283 -0
- solace_agent_mesh/agent/sac/__init__.py +0 -0
- solace_agent_mesh/agent/sac/app.py +595 -0
- solace_agent_mesh/agent/sac/component.py +3668 -0
- solace_agent_mesh/agent/sac/patch_adk.py +103 -0
- solace_agent_mesh/agent/sac/sac_llm.txt +189 -0
- solace_agent_mesh/agent/sac/sac_llm_detail.txt +200 -0
- solace_agent_mesh/agent/sac/task_execution_context.py +415 -0
- solace_agent_mesh/agent/testing/__init__.py +3 -0
- solace_agent_mesh/agent/testing/debug_utils.py +135 -0
- solace_agent_mesh/agent/testing/testing_llm.txt +58 -0
- solace_agent_mesh/agent/testing/testing_llm_detail.txt +68 -0
- solace_agent_mesh/agent/tools/__init__.py +16 -0
- solace_agent_mesh/agent/tools/audio_tools.py +1740 -0
- solace_agent_mesh/agent/tools/builtin_artifact_tools.py +2500 -0
- solace_agent_mesh/agent/tools/builtin_data_analysis_tools.py +244 -0
- solace_agent_mesh/agent/tools/dynamic_tool.py +396 -0
- solace_agent_mesh/agent/tools/general_agent_tools.py +572 -0
- solace_agent_mesh/agent/tools/image_tools.py +1185 -0
- solace_agent_mesh/agent/tools/peer_agent_tool.py +363 -0
- solace_agent_mesh/agent/tools/registry.py +38 -0
- solace_agent_mesh/agent/tools/test_tools.py +136 -0
- solace_agent_mesh/agent/tools/time_tools.py +126 -0
- solace_agent_mesh/agent/tools/tool_config_types.py +93 -0
- solace_agent_mesh/agent/tools/tool_definition.py +53 -0
- solace_agent_mesh/agent/tools/tools_llm.txt +276 -0
- solace_agent_mesh/agent/tools/tools_llm_detail.txt +275 -0
- solace_agent_mesh/agent/tools/web_tools.py +392 -0
- solace_agent_mesh/agent/utils/__init__.py +0 -0
- solace_agent_mesh/agent/utils/artifact_helpers.py +1353 -0
- solace_agent_mesh/agent/utils/config_parser.py +49 -0
- solace_agent_mesh/agent/utils/context_helpers.py +77 -0
- solace_agent_mesh/agent/utils/utils_llm.txt +152 -0
- solace_agent_mesh/agent/utils/utils_llm_detail.txt +149 -0
- solace_agent_mesh/assets/docs/404.html +16 -0
- solace_agent_mesh/assets/docs/assets/css/styles.8162edfb.css +1 -0
- solace_agent_mesh/assets/docs/assets/images/Solace_AI_Framework_With_Broker-85f0a306a9bcdd20b390b7a949f6d862.png +0 -0
- solace_agent_mesh/assets/docs/assets/images/sam-enterprise-credentials-b269f095349473118b2b33bdfcc40122.png +0 -0
- solace_agent_mesh/assets/docs/assets/js/032c2d61.f3d37824.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/05749d90.19ac4f35.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/0bcf40b7.c019ad46.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/1001.0182a8bd.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/1039.0bd46aa1.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/149.b797a808.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/15ba94aa.92fea363.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/15e40e79.434bb30f.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/165.6a39807d.js +2 -0
- solace_agent_mesh/assets/docs/assets/js/165.6a39807d.js.LICENSE.txt +9 -0
- solace_agent_mesh/assets/docs/assets/js/17896441.e612dfb4.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/2130.ab9fd314.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/2131ec11.5c7a1f6e.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/2237.5e477fc6.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/2279.550aa580.js +2 -0
- solace_agent_mesh/assets/docs/assets/js/2279.550aa580.js.LICENSE.txt +13 -0
- solace_agent_mesh/assets/docs/assets/js/2334.1cf50a20.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/240a0364.9ad94d1b.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/2987107d.a80604f9.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/2e32b5e0.33f5d75b.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/3219.adc1d663.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/341393d4.0fac2613.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/3624.0eaa1fd0.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/375.708d48db.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/3834.b6cd790e.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/3a6c6137.f5940cfa.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/3ac1795d.28b7c67b.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/3ff0015d.2ddc75c0.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/41adc471.48b12a4e.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/4250.95455b28.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/4356.d169ab5b.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/4458.518e66fa.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/4488.c7cc3442.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/4494.6ee23046.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/4855.fc4444b6.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/4866.22daefc0.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/4950.ca4caeda.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/509e993c.a1fbf45a.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/5388.7a136447.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/547e15cc.2f7790c1.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/55b7b518.29d6e75d.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/5607.081356f8.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/5864.b0d0e9de.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/5c2bd65f.90a87880.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/5e95c892.558d5167.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/6063ff4c.ef84f702.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/60702c0e.a8bdd79b.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/6143.0a1464c9.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/631738c7.fa471607.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/6395.e9c73649.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/64195356.c498c4d0.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/66d4869e.b77431fc.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/6796.51d2c9b7.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/6976.379be23b.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/6978.ee0b945c.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/6a520c9d.b6e3f2ce.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/6aaedf65.7253541d.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/6ad8f0bd.a5b36a60.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/6d84eae0.fd23ba4a.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/6fdfefc7.99de744e.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/7040.cb436723.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/7195.412f418a.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/71da7b71.374b9d54.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/722f809d.965da774.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/7280.3fb73bdb.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/742f027b.46c07808.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/77cf947d.48cb18a2.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/7845.e33e7c4c.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/7900.69516146.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/8024126c.fa0e7186.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/81a99df0.2484b8d9.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/82fbfb93.161823a5.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/8356.8a379c04.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/8567.4732c6b7.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/8573.cb04eda5.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/8577.1d54e766.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/8591.5d015485.js +2 -0
- solace_agent_mesh/assets/docs/assets/js/8591.5d015485.js.LICENSE.txt +61 -0
- solace_agent_mesh/assets/docs/assets/js/8709.7ecd4047.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/8731.6c1dbf0c.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/8908.f9d1b506.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/8b032486.91a91afc.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/9157.b4093d07.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/924ffdeb.975e428a.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/9278.a4fd875d.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/945fb41e.6f4cdffd.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/94e8668d.16083b3f.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/9616.b75c2f6d.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/9793.c6d16376.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/9bb13469.b2333011.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/9e9d0a82.570c057b.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/a7bd4aaa.2204d2f7.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/a94703ab.3e5fbcb3.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/ab9708a8.245ae0ef.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/aba21aa0.c42a534c.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/ad71b5ed.af3ecfd1.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/ad87452a.9d73dad6.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/c198a0dc.8f31f867.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/c93cbaa0.0e0d8baf.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/cab03b5b.6a073091.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/cbe2e9ea.07e170dd.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/ceb2a7a6.5d92d7d0.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/da0b5bad.b62f7b08.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/db5d6442.3daf1696.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/db924877.e98d12a1.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/dd817ffc.c37a755e.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/dd81e2b8.b682e9c2.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/de5f4c65.e8241890.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/de915948.44a432bc.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/e04b235d.52cb25ed.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/e1b6eeb4.b1068f9b.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/e3d9abda.1476f570.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/e6f9706b.4488e34c.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/e92d0134.3bda61dd.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/f284c35a.250993bf.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/ff4d71f2.74710fc1.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/main.7acf7ace.js +2 -0
- solace_agent_mesh/assets/docs/assets/js/main.7acf7ace.js.LICENSE.txt +81 -0
- solace_agent_mesh/assets/docs/assets/js/runtime~main.9e0813a2.js +1 -0
- solace_agent_mesh/assets/docs/docs/documentation/components/agents/index.html +154 -0
- solace_agent_mesh/assets/docs/docs/documentation/components/builtin-tools/artifact-management/index.html +99 -0
- solace_agent_mesh/assets/docs/docs/documentation/components/builtin-tools/audio-tools/index.html +90 -0
- solace_agent_mesh/assets/docs/docs/documentation/components/builtin-tools/data-analysis-tools/index.html +107 -0
- solace_agent_mesh/assets/docs/docs/documentation/components/builtin-tools/embeds/index.html +166 -0
- solace_agent_mesh/assets/docs/docs/documentation/components/builtin-tools/index.html +101 -0
- solace_agent_mesh/assets/docs/docs/documentation/components/cli/index.html +219 -0
- solace_agent_mesh/assets/docs/docs/documentation/components/gateways/index.html +92 -0
- solace_agent_mesh/assets/docs/docs/documentation/components/index.html +29 -0
- solace_agent_mesh/assets/docs/docs/documentation/components/orchestrator/index.html +55 -0
- solace_agent_mesh/assets/docs/docs/documentation/components/plugins/index.html +110 -0
- solace_agent_mesh/assets/docs/docs/documentation/components/projects/index.html +182 -0
- solace_agent_mesh/assets/docs/docs/documentation/components/prompts/index.html +147 -0
- solace_agent_mesh/assets/docs/docs/documentation/components/proxies/index.html +345 -0
- solace_agent_mesh/assets/docs/docs/documentation/components/speech/index.html +52 -0
- solace_agent_mesh/assets/docs/docs/documentation/deploying/debugging/index.html +83 -0
- solace_agent_mesh/assets/docs/docs/documentation/deploying/deployment-options/index.html +84 -0
- solace_agent_mesh/assets/docs/docs/documentation/deploying/index.html +25 -0
- solace_agent_mesh/assets/docs/docs/documentation/deploying/kubernetes-deployment/index.html +47 -0
- solace_agent_mesh/assets/docs/docs/documentation/deploying/logging/index.html +85 -0
- solace_agent_mesh/assets/docs/docs/documentation/deploying/observability/index.html +60 -0
- solace_agent_mesh/assets/docs/docs/documentation/deploying/proxy_configuration/index.html +49 -0
- solace_agent_mesh/assets/docs/docs/documentation/developing/create-agents/index.html +144 -0
- solace_agent_mesh/assets/docs/docs/documentation/developing/create-gateways/index.html +191 -0
- solace_agent_mesh/assets/docs/docs/documentation/developing/creating-python-tools/index.html +128 -0
- solace_agent_mesh/assets/docs/docs/documentation/developing/creating-service-providers/index.html +54 -0
- solace_agent_mesh/assets/docs/docs/documentation/developing/evaluations/index.html +135 -0
- solace_agent_mesh/assets/docs/docs/documentation/developing/index.html +34 -0
- solace_agent_mesh/assets/docs/docs/documentation/developing/structure/index.html +55 -0
- solace_agent_mesh/assets/docs/docs/documentation/developing/tutorials/bedrock-agents/index.html +267 -0
- solace_agent_mesh/assets/docs/docs/documentation/developing/tutorials/custom-agent/index.html +142 -0
- solace_agent_mesh/assets/docs/docs/documentation/developing/tutorials/event-mesh-gateway/index.html +116 -0
- solace_agent_mesh/assets/docs/docs/documentation/developing/tutorials/mcp-integration/index.html +86 -0
- solace_agent_mesh/assets/docs/docs/documentation/developing/tutorials/mongodb-integration/index.html +164 -0
- solace_agent_mesh/assets/docs/docs/documentation/developing/tutorials/rag-integration/index.html +140 -0
- solace_agent_mesh/assets/docs/docs/documentation/developing/tutorials/rest-gateway/index.html +57 -0
- solace_agent_mesh/assets/docs/docs/documentation/developing/tutorials/slack-integration/index.html +72 -0
- solace_agent_mesh/assets/docs/docs/documentation/developing/tutorials/sql-database/index.html +102 -0
- solace_agent_mesh/assets/docs/docs/documentation/developing/tutorials/teams-integration/index.html +115 -0
- solace_agent_mesh/assets/docs/docs/documentation/enterprise/agent-builder/index.html +86 -0
- solace_agent_mesh/assets/docs/docs/documentation/enterprise/connectors/index.html +67 -0
- solace_agent_mesh/assets/docs/docs/documentation/enterprise/index.html +37 -0
- solace_agent_mesh/assets/docs/docs/documentation/enterprise/installation/index.html +86 -0
- solace_agent_mesh/assets/docs/docs/documentation/enterprise/openapi-tools/index.html +324 -0
- solace_agent_mesh/assets/docs/docs/documentation/enterprise/rbac-setup-guide/index.html +247 -0
- solace_agent_mesh/assets/docs/docs/documentation/enterprise/secure-user-delegated-access/index.html +440 -0
- solace_agent_mesh/assets/docs/docs/documentation/enterprise/single-sign-on/index.html +184 -0
- solace_agent_mesh/assets/docs/docs/documentation/enterprise/wheel-installation/index.html +62 -0
- solace_agent_mesh/assets/docs/docs/documentation/getting-started/architecture/index.html +75 -0
- solace_agent_mesh/assets/docs/docs/documentation/getting-started/index.html +54 -0
- solace_agent_mesh/assets/docs/docs/documentation/getting-started/introduction/index.html +85 -0
- solace_agent_mesh/assets/docs/docs/documentation/getting-started/try-agent-mesh/index.html +41 -0
- solace_agent_mesh/assets/docs/docs/documentation/installing-and-configuring/artifact-storage/index.html +290 -0
- solace_agent_mesh/assets/docs/docs/documentation/installing-and-configuring/configurations/index.html +78 -0
- solace_agent_mesh/assets/docs/docs/documentation/installing-and-configuring/index.html +25 -0
- solace_agent_mesh/assets/docs/docs/documentation/installing-and-configuring/installation/index.html +78 -0
- solace_agent_mesh/assets/docs/docs/documentation/installing-and-configuring/large_language_models/index.html +160 -0
- solace_agent_mesh/assets/docs/docs/documentation/installing-and-configuring/run-project/index.html +142 -0
- solace_agent_mesh/assets/docs/docs/documentation/installing-and-configuring/session-storage/index.html +251 -0
- solace_agent_mesh/assets/docs/docs/documentation/installing-and-configuring/user-feedback/index.html +88 -0
- solace_agent_mesh/assets/docs/docs/documentation/migrations/a2a-upgrade/a2a-gateway-upgrade-to-0.3.0/index.html +100 -0
- solace_agent_mesh/assets/docs/docs/documentation/migrations/a2a-upgrade/a2a-technical-migration-map/index.html +52 -0
- solace_agent_mesh/assets/docs/img/Solace_AI_Framework_With_Broker.png +0 -0
- solace_agent_mesh/assets/docs/img/logo.png +0 -0
- solace_agent_mesh/assets/docs/img/sac-flows.png +0 -0
- solace_agent_mesh/assets/docs/img/sac_parts_of_a_component.png +0 -0
- solace_agent_mesh/assets/docs/img/sam-enterprise-credentials.png +0 -0
- solace_agent_mesh/assets/docs/img/solace-logo-text.svg +18 -0
- solace_agent_mesh/assets/docs/img/solace-logo.png +0 -0
- solace_agent_mesh/assets/docs/lunr-index-1765810064709.json +1 -0
- solace_agent_mesh/assets/docs/lunr-index.json +1 -0
- solace_agent_mesh/assets/docs/search-doc-1765810064709.json +1 -0
- solace_agent_mesh/assets/docs/search-doc.json +1 -0
- solace_agent_mesh/assets/docs/sitemap.xml +1 -0
- solace_agent_mesh/cli/__init__.py +1 -0
- solace_agent_mesh/cli/commands/__init__.py +0 -0
- solace_agent_mesh/cli/commands/add_cmd/__init__.py +15 -0
- solace_agent_mesh/cli/commands/add_cmd/add_cmd_llm.txt +250 -0
- solace_agent_mesh/cli/commands/add_cmd/agent_cmd.py +729 -0
- solace_agent_mesh/cli/commands/add_cmd/gateway_cmd.py +322 -0
- solace_agent_mesh/cli/commands/add_cmd/web_add_agent_step.py +102 -0
- solace_agent_mesh/cli/commands/add_cmd/web_add_gateway_step.py +114 -0
- solace_agent_mesh/cli/commands/docs_cmd.py +60 -0
- solace_agent_mesh/cli/commands/eval_cmd.py +46 -0
- solace_agent_mesh/cli/commands/init_cmd/__init__.py +439 -0
- solace_agent_mesh/cli/commands/init_cmd/broker_step.py +201 -0
- solace_agent_mesh/cli/commands/init_cmd/database_step.py +91 -0
- solace_agent_mesh/cli/commands/init_cmd/directory_step.py +28 -0
- solace_agent_mesh/cli/commands/init_cmd/env_step.py +238 -0
- solace_agent_mesh/cli/commands/init_cmd/init_cmd_llm.txt +365 -0
- solace_agent_mesh/cli/commands/init_cmd/orchestrator_step.py +464 -0
- solace_agent_mesh/cli/commands/init_cmd/project_files_step.py +38 -0
- solace_agent_mesh/cli/commands/init_cmd/web_init_step.py +119 -0
- solace_agent_mesh/cli/commands/init_cmd/webui_gateway_step.py +215 -0
- solace_agent_mesh/cli/commands/plugin_cmd/__init__.py +20 -0
- solace_agent_mesh/cli/commands/plugin_cmd/add_cmd.py +137 -0
- solace_agent_mesh/cli/commands/plugin_cmd/build_cmd.py +86 -0
- solace_agent_mesh/cli/commands/plugin_cmd/catalog_cmd.py +144 -0
- solace_agent_mesh/cli/commands/plugin_cmd/create_cmd.py +306 -0
- solace_agent_mesh/cli/commands/plugin_cmd/install_cmd.py +283 -0
- solace_agent_mesh/cli/commands/plugin_cmd/official_registry.py +175 -0
- solace_agent_mesh/cli/commands/plugin_cmd/plugin_cmd_llm.txt +305 -0
- solace_agent_mesh/cli/commands/run_cmd.py +215 -0
- solace_agent_mesh/cli/main.py +52 -0
- solace_agent_mesh/cli/utils.py +262 -0
- solace_agent_mesh/client/webui/frontend/static/assets/authCallback-Dj3JtK42.js +1 -0
- solace_agent_mesh/client/webui/frontend/static/assets/client-ZKk9kEJ5.js +25 -0
- solace_agent_mesh/client/webui/frontend/static/assets/favicon-BLgzUch9.ico +0 -0
- solace_agent_mesh/client/webui/frontend/static/assets/main-BcUaNZ-Q.css +1 -0
- solace_agent_mesh/client/webui/frontend/static/assets/main-vjch4RYc.js +435 -0
- solace_agent_mesh/client/webui/frontend/static/assets/vendor-BNV4kZN0.js +535 -0
- solace_agent_mesh/client/webui/frontend/static/auth-callback.html +15 -0
- solace_agent_mesh/client/webui/frontend/static/index.html +16 -0
- solace_agent_mesh/client/webui/frontend/static/mockServiceWorker.js +336 -0
- solace_agent_mesh/client/webui/frontend/static/ui-version.json +6 -0
- solace_agent_mesh/common/__init__.py +1 -0
- solace_agent_mesh/common/a2a/__init__.py +241 -0
- solace_agent_mesh/common/a2a/a2a_llm.txt +175 -0
- solace_agent_mesh/common/a2a/a2a_llm_detail.txt +193 -0
- solace_agent_mesh/common/a2a/artifact.py +368 -0
- solace_agent_mesh/common/a2a/events.py +213 -0
- solace_agent_mesh/common/a2a/message.py +375 -0
- solace_agent_mesh/common/a2a/protocol.py +689 -0
- solace_agent_mesh/common/a2a/task.py +127 -0
- solace_agent_mesh/common/a2a/translation.py +655 -0
- solace_agent_mesh/common/a2a/types.py +55 -0
- solace_agent_mesh/common/a2a_spec/a2a.json +2576 -0
- solace_agent_mesh/common/a2a_spec/a2a_spec_llm.txt +445 -0
- solace_agent_mesh/common/a2a_spec/a2a_spec_llm_detail.txt +736 -0
- solace_agent_mesh/common/a2a_spec/schemas/agent_progress_update.json +18 -0
- solace_agent_mesh/common/a2a_spec/schemas/artifact_creation_progress.json +48 -0
- solace_agent_mesh/common/a2a_spec/schemas/feedback_event.json +51 -0
- solace_agent_mesh/common/a2a_spec/schemas/llm_invocation.json +41 -0
- solace_agent_mesh/common/a2a_spec/schemas/schemas_llm.txt +330 -0
- solace_agent_mesh/common/a2a_spec/schemas/tool_invocation_start.json +26 -0
- solace_agent_mesh/common/a2a_spec/schemas/tool_result.json +48 -0
- solace_agent_mesh/common/agent_registry.py +122 -0
- solace_agent_mesh/common/common_llm.txt +230 -0
- solace_agent_mesh/common/common_llm_detail.txt +2562 -0
- solace_agent_mesh/common/constants.py +6 -0
- solace_agent_mesh/common/data_parts.py +150 -0
- solace_agent_mesh/common/exceptions.py +49 -0
- solace_agent_mesh/common/middleware/__init__.py +12 -0
- solace_agent_mesh/common/middleware/config_resolver.py +132 -0
- solace_agent_mesh/common/middleware/middleware_llm.txt +174 -0
- solace_agent_mesh/common/middleware/middleware_llm_detail.txt +185 -0
- solace_agent_mesh/common/middleware/registry.py +127 -0
- solace_agent_mesh/common/oauth/__init__.py +17 -0
- solace_agent_mesh/common/oauth/oauth_client.py +408 -0
- solace_agent_mesh/common/oauth/utils.py +50 -0
- solace_agent_mesh/common/sac/__init__.py +0 -0
- solace_agent_mesh/common/sac/sac_llm.txt +71 -0
- solace_agent_mesh/common/sac/sac_llm_detail.txt +82 -0
- solace_agent_mesh/common/sac/sam_component_base.py +730 -0
- solace_agent_mesh/common/sam_events/__init__.py +9 -0
- solace_agent_mesh/common/sam_events/event_service.py +208 -0
- solace_agent_mesh/common/sam_events/sam_events_llm.txt +104 -0
- solace_agent_mesh/common/sam_events/sam_events_llm_detail.txt +115 -0
- solace_agent_mesh/common/services/__init__.py +4 -0
- solace_agent_mesh/common/services/employee_service.py +164 -0
- solace_agent_mesh/common/services/identity_service.py +134 -0
- solace_agent_mesh/common/services/providers/__init__.py +4 -0
- solace_agent_mesh/common/services/providers/local_file_identity_service.py +151 -0
- solace_agent_mesh/common/services/providers/providers_llm.txt +81 -0
- solace_agent_mesh/common/services/services_llm.txt +368 -0
- solace_agent_mesh/common/services/services_llm_detail.txt +459 -0
- solace_agent_mesh/common/utils/__init__.py +7 -0
- solace_agent_mesh/common/utils/artifact_utils.py +31 -0
- solace_agent_mesh/common/utils/asyncio_macos_fix.py +88 -0
- solace_agent_mesh/common/utils/embeds/__init__.py +33 -0
- solace_agent_mesh/common/utils/embeds/constants.py +56 -0
- solace_agent_mesh/common/utils/embeds/converter.py +447 -0
- solace_agent_mesh/common/utils/embeds/embeds_llm.txt +220 -0
- solace_agent_mesh/common/utils/embeds/evaluators.py +395 -0
- solace_agent_mesh/common/utils/embeds/modifiers.py +793 -0
- solace_agent_mesh/common/utils/embeds/resolver.py +967 -0
- solace_agent_mesh/common/utils/embeds/types.py +23 -0
- solace_agent_mesh/common/utils/in_memory_cache.py +108 -0
- solace_agent_mesh/common/utils/initializer.py +52 -0
- solace_agent_mesh/common/utils/log_formatters.py +64 -0
- solace_agent_mesh/common/utils/message_utils.py +80 -0
- solace_agent_mesh/common/utils/mime_helpers.py +172 -0
- solace_agent_mesh/common/utils/push_notification_auth.py +135 -0
- solace_agent_mesh/common/utils/pydantic_utils.py +159 -0
- solace_agent_mesh/common/utils/rbac_utils.py +69 -0
- solace_agent_mesh/common/utils/templates/__init__.py +8 -0
- solace_agent_mesh/common/utils/templates/liquid_renderer.py +210 -0
- solace_agent_mesh/common/utils/templates/template_resolver.py +161 -0
- solace_agent_mesh/common/utils/type_utils.py +28 -0
- solace_agent_mesh/common/utils/utils_llm.txt +335 -0
- solace_agent_mesh/common/utils/utils_llm_detail.txt +572 -0
- solace_agent_mesh/config_portal/__init__.py +0 -0
- solace_agent_mesh/config_portal/backend/__init__.py +0 -0
- solace_agent_mesh/config_portal/backend/common.py +77 -0
- solace_agent_mesh/config_portal/backend/plugin_catalog/__init__.py +0 -0
- solace_agent_mesh/config_portal/backend/plugin_catalog/constants.py +24 -0
- solace_agent_mesh/config_portal/backend/plugin_catalog/models.py +49 -0
- solace_agent_mesh/config_portal/backend/plugin_catalog/registry_manager.py +166 -0
- solace_agent_mesh/config_portal/backend/plugin_catalog/scraper.py +521 -0
- solace_agent_mesh/config_portal/backend/plugin_catalog_server.py +217 -0
- solace_agent_mesh/config_portal/backend/server.py +644 -0
- solace_agent_mesh/config_portal/frontend/static/client/Solace_community_logo.png +0 -0
- solace_agent_mesh/config_portal/frontend/static/client/assets/_index-DiOiAjzL.js +103 -0
- solace_agent_mesh/config_portal/frontend/static/client/assets/components-Rk0n-9cK.js +140 -0
- solace_agent_mesh/config_portal/frontend/static/client/assets/entry.client-mvZjNKiz.js +19 -0
- solace_agent_mesh/config_portal/frontend/static/client/assets/index-DzNKzXrc.js +68 -0
- solace_agent_mesh/config_portal/frontend/static/client/assets/manifest-ba77705e.js +1 -0
- solace_agent_mesh/config_portal/frontend/static/client/assets/root-B17tZKK7.css +1 -0
- solace_agent_mesh/config_portal/frontend/static/client/assets/root-V2BeTIUc.js +10 -0
- solace_agent_mesh/config_portal/frontend/static/client/favicon.ico +0 -0
- solace_agent_mesh/config_portal/frontend/static/client/index.html +7 -0
- solace_agent_mesh/core_a2a/__init__.py +1 -0
- solace_agent_mesh/core_a2a/core_a2a_llm.txt +90 -0
- solace_agent_mesh/core_a2a/core_a2a_llm_detail.txt +101 -0
- solace_agent_mesh/core_a2a/service.py +307 -0
- solace_agent_mesh/evaluation/__init__.py +0 -0
- solace_agent_mesh/evaluation/evaluator.py +691 -0
- solace_agent_mesh/evaluation/message_organizer.py +553 -0
- solace_agent_mesh/evaluation/report/benchmark_info.html +35 -0
- solace_agent_mesh/evaluation/report/chart_section.html +141 -0
- solace_agent_mesh/evaluation/report/detailed_breakdown.html +28 -0
- solace_agent_mesh/evaluation/report/modal.html +59 -0
- solace_agent_mesh/evaluation/report/modal_chart_functions.js +411 -0
- solace_agent_mesh/evaluation/report/modal_script.js +296 -0
- solace_agent_mesh/evaluation/report/modal_styles.css +340 -0
- solace_agent_mesh/evaluation/report/performance_metrics_styles.css +93 -0
- solace_agent_mesh/evaluation/report/templates/footer.html +2 -0
- solace_agent_mesh/evaluation/report/templates/header.html +340 -0
- solace_agent_mesh/evaluation/report_data_processor.py +970 -0
- solace_agent_mesh/evaluation/report_generator.py +607 -0
- solace_agent_mesh/evaluation/run.py +954 -0
- solace_agent_mesh/evaluation/shared/__init__.py +92 -0
- solace_agent_mesh/evaluation/shared/constants.py +47 -0
- solace_agent_mesh/evaluation/shared/exceptions.py +50 -0
- solace_agent_mesh/evaluation/shared/helpers.py +35 -0
- solace_agent_mesh/evaluation/shared/test_case_loader.py +167 -0
- solace_agent_mesh/evaluation/shared/test_suite_loader.py +280 -0
- solace_agent_mesh/evaluation/subscriber.py +776 -0
- solace_agent_mesh/evaluation/summary_builder.py +880 -0
- solace_agent_mesh/gateway/__init__.py +0 -0
- solace_agent_mesh/gateway/adapter/__init__.py +1 -0
- solace_agent_mesh/gateway/adapter/base.py +143 -0
- solace_agent_mesh/gateway/adapter/types.py +221 -0
- solace_agent_mesh/gateway/base/__init__.py +1 -0
- solace_agent_mesh/gateway/base/app.py +345 -0
- solace_agent_mesh/gateway/base/base_llm.txt +226 -0
- solace_agent_mesh/gateway/base/base_llm_detail.txt +235 -0
- solace_agent_mesh/gateway/base/component.py +2030 -0
- solace_agent_mesh/gateway/base/task_context.py +75 -0
- solace_agent_mesh/gateway/gateway_llm.txt +369 -0
- solace_agent_mesh/gateway/gateway_llm_detail.txt +3885 -0
- solace_agent_mesh/gateway/generic/__init__.py +1 -0
- solace_agent_mesh/gateway/generic/app.py +50 -0
- solace_agent_mesh/gateway/generic/component.py +727 -0
- solace_agent_mesh/gateway/http_sse/__init__.py +0 -0
- solace_agent_mesh/gateway/http_sse/alembic/alembic_llm.txt +345 -0
- solace_agent_mesh/gateway/http_sse/alembic/env.py +87 -0
- solace_agent_mesh/gateway/http_sse/alembic/script.py.mako +28 -0
- solace_agent_mesh/gateway/http_sse/alembic/versions/20250910_d5b3f8f2e9a0_create_initial_database.py +58 -0
- solace_agent_mesh/gateway/http_sse/alembic/versions/20250911_b1c2d3e4f5g6_add_database_indexes.py +83 -0
- solace_agent_mesh/gateway/http_sse/alembic/versions/20250916_f6e7d8c9b0a1_convert_timestamps_to_epoch_and_align_columns.py +412 -0
- solace_agent_mesh/gateway/http_sse/alembic/versions/20251006_98882922fa59_add_tasks_events_feedback_chat_tasks.py +190 -0
- solace_agent_mesh/gateway/http_sse/alembic/versions/20251015_add_session_performance_indexes.py +70 -0
- solace_agent_mesh/gateway/http_sse/alembic/versions/20251023_add_project_users_table.py +72 -0
- solace_agent_mesh/gateway/http_sse/alembic/versions/20251023_add_soft_delete_and_search.py +109 -0
- solace_agent_mesh/gateway/http_sse/alembic/versions/20251024_add_default_agent_to_projects.py +26 -0
- solace_agent_mesh/gateway/http_sse/alembic/versions/20251024_add_projects_table.py +135 -0
- solace_agent_mesh/gateway/http_sse/alembic/versions/20251108_create_prompt_tables_with_sharing.py +154 -0
- solace_agent_mesh/gateway/http_sse/alembic/versions/20251115_add_parent_task_id.py +32 -0
- solace_agent_mesh/gateway/http_sse/alembic/versions/20251126_add_background_task_fields.py +47 -0
- solace_agent_mesh/gateway/http_sse/alembic/versions/20251202_add_versioned_fields_to_prompts.py +52 -0
- solace_agent_mesh/gateway/http_sse/alembic/versions/versions_llm.txt +161 -0
- solace_agent_mesh/gateway/http_sse/alembic.ini +109 -0
- solace_agent_mesh/gateway/http_sse/app.py +351 -0
- solace_agent_mesh/gateway/http_sse/component.py +2360 -0
- solace_agent_mesh/gateway/http_sse/components/__init__.py +7 -0
- solace_agent_mesh/gateway/http_sse/components/components_llm.txt +105 -0
- solace_agent_mesh/gateway/http_sse/components/task_logger_forwarder.py +109 -0
- solace_agent_mesh/gateway/http_sse/components/visualization_forwarder_component.py +110 -0
- solace_agent_mesh/gateway/http_sse/dependencies.py +653 -0
- solace_agent_mesh/gateway/http_sse/http_sse_llm.txt +299 -0
- solace_agent_mesh/gateway/http_sse/http_sse_llm_detail.txt +3278 -0
- solace_agent_mesh/gateway/http_sse/main.py +789 -0
- solace_agent_mesh/gateway/http_sse/repository/__init__.py +46 -0
- solace_agent_mesh/gateway/http_sse/repository/chat_task_repository.py +102 -0
- solace_agent_mesh/gateway/http_sse/repository/entities/__init__.py +11 -0
- solace_agent_mesh/gateway/http_sse/repository/entities/chat_task.py +75 -0
- solace_agent_mesh/gateway/http_sse/repository/entities/entities_llm.txt +221 -0
- solace_agent_mesh/gateway/http_sse/repository/entities/feedback.py +20 -0
- solace_agent_mesh/gateway/http_sse/repository/entities/project.py +81 -0
- solace_agent_mesh/gateway/http_sse/repository/entities/project_user.py +47 -0
- solace_agent_mesh/gateway/http_sse/repository/entities/session.py +66 -0
- solace_agent_mesh/gateway/http_sse/repository/entities/session_history.py +0 -0
- solace_agent_mesh/gateway/http_sse/repository/entities/task.py +32 -0
- solace_agent_mesh/gateway/http_sse/repository/entities/task_event.py +21 -0
- solace_agent_mesh/gateway/http_sse/repository/feedback_repository.py +125 -0
- solace_agent_mesh/gateway/http_sse/repository/interfaces.py +239 -0
- solace_agent_mesh/gateway/http_sse/repository/models/__init__.py +34 -0
- solace_agent_mesh/gateway/http_sse/repository/models/base.py +7 -0
- solace_agent_mesh/gateway/http_sse/repository/models/chat_task_model.py +31 -0
- solace_agent_mesh/gateway/http_sse/repository/models/feedback_model.py +21 -0
- solace_agent_mesh/gateway/http_sse/repository/models/models_llm.txt +257 -0
- solace_agent_mesh/gateway/http_sse/repository/models/project_model.py +51 -0
- solace_agent_mesh/gateway/http_sse/repository/models/project_user_model.py +75 -0
- solace_agent_mesh/gateway/http_sse/repository/models/prompt_model.py +159 -0
- solace_agent_mesh/gateway/http_sse/repository/models/session_model.py +53 -0
- solace_agent_mesh/gateway/http_sse/repository/models/task_event_model.py +25 -0
- solace_agent_mesh/gateway/http_sse/repository/models/task_model.py +39 -0
- solace_agent_mesh/gateway/http_sse/repository/project_repository.py +172 -0
- solace_agent_mesh/gateway/http_sse/repository/project_user_repository.py +186 -0
- solace_agent_mesh/gateway/http_sse/repository/repository_llm.txt +308 -0
- solace_agent_mesh/gateway/http_sse/repository/session_repository.py +268 -0
- solace_agent_mesh/gateway/http_sse/repository/task_repository.py +248 -0
- solace_agent_mesh/gateway/http_sse/routers/__init__.py +4 -0
- solace_agent_mesh/gateway/http_sse/routers/agent_cards.py +74 -0
- solace_agent_mesh/gateway/http_sse/routers/artifacts.py +1137 -0
- solace_agent_mesh/gateway/http_sse/routers/auth.py +311 -0
- solace_agent_mesh/gateway/http_sse/routers/config.py +371 -0
- solace_agent_mesh/gateway/http_sse/routers/dto/__init__.py +10 -0
- solace_agent_mesh/gateway/http_sse/routers/dto/dto_llm.txt +450 -0
- solace_agent_mesh/gateway/http_sse/routers/dto/project_dto.py +69 -0
- solace_agent_mesh/gateway/http_sse/routers/dto/prompt_dto.py +255 -0
- solace_agent_mesh/gateway/http_sse/routers/dto/requests/__init__.py +15 -0
- solace_agent_mesh/gateway/http_sse/routers/dto/requests/project_requests.py +48 -0
- solace_agent_mesh/gateway/http_sse/routers/dto/requests/requests_llm.txt +133 -0
- solace_agent_mesh/gateway/http_sse/routers/dto/requests/session_requests.py +33 -0
- solace_agent_mesh/gateway/http_sse/routers/dto/requests/task_requests.py +58 -0
- solace_agent_mesh/gateway/http_sse/routers/dto/responses/__init__.py +18 -0
- solace_agent_mesh/gateway/http_sse/routers/dto/responses/base_responses.py +42 -0
- solace_agent_mesh/gateway/http_sse/routers/dto/responses/project_responses.py +31 -0
- solace_agent_mesh/gateway/http_sse/routers/dto/responses/responses_llm.txt +123 -0
- solace_agent_mesh/gateway/http_sse/routers/dto/responses/session_responses.py +33 -0
- solace_agent_mesh/gateway/http_sse/routers/dto/responses/task_responses.py +30 -0
- solace_agent_mesh/gateway/http_sse/routers/dto/responses/version_responses.py +31 -0
- solace_agent_mesh/gateway/http_sse/routers/feedback.py +168 -0
- solace_agent_mesh/gateway/http_sse/routers/people.py +38 -0
- solace_agent_mesh/gateway/http_sse/routers/projects.py +767 -0
- solace_agent_mesh/gateway/http_sse/routers/prompts.py +1415 -0
- solace_agent_mesh/gateway/http_sse/routers/routers_llm.txt +312 -0
- solace_agent_mesh/gateway/http_sse/routers/sessions.py +634 -0
- solace_agent_mesh/gateway/http_sse/routers/speech.py +355 -0
- solace_agent_mesh/gateway/http_sse/routers/sse.py +230 -0
- solace_agent_mesh/gateway/http_sse/routers/tasks.py +1089 -0
- solace_agent_mesh/gateway/http_sse/routers/users.py +83 -0
- solace_agent_mesh/gateway/http_sse/routers/version.py +343 -0
- solace_agent_mesh/gateway/http_sse/routers/visualization.py +1220 -0
- solace_agent_mesh/gateway/http_sse/services/__init__.py +4 -0
- solace_agent_mesh/gateway/http_sse/services/agent_card_service.py +71 -0
- solace_agent_mesh/gateway/http_sse/services/audio_service.py +1227 -0
- solace_agent_mesh/gateway/http_sse/services/background_task_monitor.py +186 -0
- solace_agent_mesh/gateway/http_sse/services/data_retention_service.py +273 -0
- solace_agent_mesh/gateway/http_sse/services/feedback_service.py +250 -0
- solace_agent_mesh/gateway/http_sse/services/people_service.py +78 -0
- solace_agent_mesh/gateway/http_sse/services/project_service.py +930 -0
- solace_agent_mesh/gateway/http_sse/services/prompt_builder_assistant.py +303 -0
- solace_agent_mesh/gateway/http_sse/services/services_llm.txt +303 -0
- solace_agent_mesh/gateway/http_sse/services/session_service.py +702 -0
- solace_agent_mesh/gateway/http_sse/services/task_logger_service.py +593 -0
- solace_agent_mesh/gateway/http_sse/services/task_service.py +119 -0
- solace_agent_mesh/gateway/http_sse/session_manager.py +219 -0
- solace_agent_mesh/gateway/http_sse/shared/__init__.py +146 -0
- solace_agent_mesh/gateway/http_sse/shared/auth_utils.py +29 -0
- solace_agent_mesh/gateway/http_sse/shared/base_repository.py +252 -0
- solace_agent_mesh/gateway/http_sse/shared/database_exceptions.py +274 -0
- solace_agent_mesh/gateway/http_sse/shared/database_helpers.py +43 -0
- solace_agent_mesh/gateway/http_sse/shared/enums.py +40 -0
- solace_agent_mesh/gateway/http_sse/shared/error_dto.py +107 -0
- solace_agent_mesh/gateway/http_sse/shared/exception_handlers.py +217 -0
- solace_agent_mesh/gateway/http_sse/shared/exceptions.py +192 -0
- solace_agent_mesh/gateway/http_sse/shared/pagination.py +138 -0
- solace_agent_mesh/gateway/http_sse/shared/response_utils.py +134 -0
- solace_agent_mesh/gateway/http_sse/shared/shared_llm.txt +319 -0
- solace_agent_mesh/gateway/http_sse/shared/timestamp_utils.py +97 -0
- solace_agent_mesh/gateway/http_sse/shared/types.py +50 -0
- solace_agent_mesh/gateway/http_sse/shared/utils.py +22 -0
- solace_agent_mesh/gateway/http_sse/sse_event_buffer.py +88 -0
- solace_agent_mesh/gateway/http_sse/sse_manager.py +491 -0
- solace_agent_mesh/gateway/http_sse/utils/__init__.py +1 -0
- solace_agent_mesh/gateway/http_sse/utils/artifact_copy_utils.py +370 -0
- solace_agent_mesh/gateway/http_sse/utils/stim_utils.py +72 -0
- solace_agent_mesh/gateway/http_sse/utils/utils_llm.txt +47 -0
- solace_agent_mesh/llm.txt +228 -0
- solace_agent_mesh/llm_detail.txt +2835 -0
- solace_agent_mesh/services/__init__.py +0 -0
- solace_agent_mesh/services/platform/__init__.py +18 -0
- solace_agent_mesh/services/platform/alembic/env.py +85 -0
- solace_agent_mesh/services/platform/alembic/script.py.mako +28 -0
- solace_agent_mesh/services/platform/alembic.ini +109 -0
- solace_agent_mesh/services/platform/api/__init__.py +3 -0
- solace_agent_mesh/services/platform/api/dependencies.py +147 -0
- solace_agent_mesh/services/platform/api/main.py +280 -0
- solace_agent_mesh/services/platform/api/middleware.py +51 -0
- solace_agent_mesh/services/platform/api/routers/__init__.py +24 -0
- solace_agent_mesh/services/platform/app.py +114 -0
- solace_agent_mesh/services/platform/component.py +235 -0
- solace_agent_mesh/solace_agent_mesh_llm.txt +362 -0
- solace_agent_mesh/solace_agent_mesh_llm_detail.txt +8599 -0
- solace_agent_mesh/templates/agent_template.yaml +53 -0
- solace_agent_mesh/templates/eval_backend_template.yaml +54 -0
- solace_agent_mesh/templates/gateway_app_template.py +75 -0
- solace_agent_mesh/templates/gateway_component_template.py +484 -0
- solace_agent_mesh/templates/gateway_config_template.yaml +38 -0
- solace_agent_mesh/templates/logging_config_template.yaml +48 -0
- solace_agent_mesh/templates/main_orchestrator.yaml +66 -0
- solace_agent_mesh/templates/plugin_agent_config_template.yaml +122 -0
- solace_agent_mesh/templates/plugin_custom_config_template.yaml +27 -0
- solace_agent_mesh/templates/plugin_custom_template.py +10 -0
- solace_agent_mesh/templates/plugin_gateway_config_template.yaml +60 -0
- solace_agent_mesh/templates/plugin_pyproject_template.toml +32 -0
- solace_agent_mesh/templates/plugin_readme_template.md +12 -0
- solace_agent_mesh/templates/plugin_tool_config_template.yaml +109 -0
- solace_agent_mesh/templates/plugin_tools_template.py +224 -0
- solace_agent_mesh/templates/shared_config.yaml +112 -0
- solace_agent_mesh/templates/templates_llm.txt +147 -0
- solace_agent_mesh/templates/webui.yaml +177 -0
- solace_agent_mesh-1.11.2.dist-info/METADATA +504 -0
- solace_agent_mesh-1.11.2.dist-info/RECORD +624 -0
- solace_agent_mesh-1.11.2.dist-info/WHEEL +4 -0
- solace_agent_mesh-1.11.2.dist-info/entry_points.txt +3 -0
- solace_agent_mesh-1.11.2.dist-info/licenses/LICENSE +201 -0
solace_agent_mesh/agent/sac/component.py
@@ -0,0 +1,3668 @@
+"""
+Custom Solace AI Connector Component to Host Google ADK Agents via A2A Protocol.
+"""
+
+import asyncio
+import concurrent.futures
+import fnmatch
+import functools
+import inspect
+import json
+import logging
+import threading
+import time
+from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Union
+
+from a2a.types import (
+    AgentCard,
+    MessageSendParams,
+    SendMessageRequest,
+    TaskState,
+    TaskStatus,
+    TaskStatusUpdateEvent,
+)
+from a2a.types import Artifact as A2AArtifact
+from a2a.types import Message as A2AMessage
+from google.adk.agents import LlmAgent, RunConfig
+from google.adk.agents.callback_context import CallbackContext
+from google.adk.agents.invocation_context import LlmCallsLimitExceededError
+from google.adk.agents.readonly_context import ReadonlyContext
+from google.adk.agents.run_config import StreamingMode
+from google.adk.artifacts import BaseArtifactService
+from google.adk.auth.credential_service.base_credential_service import (
+    BaseCredentialService,
+)
+from google.adk.events import Event as ADKEvent
+from google.adk.memory import BaseMemoryService
+from google.adk.models import LlmResponse
+from google.adk.models.llm_request import LlmRequest
+from google.adk.runners import Runner
+from google.adk.sessions import BaseSessionService
+from google.adk.tools.mcp_tool import MCPToolset
+from google.adk.tools.openapi_tool import OpenAPIToolset
+from google.genai import types as adk_types
+from pydantic import BaseModel, ValidationError
+from solace_ai_connector.common.event import Event, EventType
+from solace_ai_connector.common.message import Message as SolaceMessage
+from solace_ai_connector.common.utils import import_module
+
+from ...agent.adk.runner import TaskCancelledError, run_adk_async_task_thread_wrapper
+from ...agent.adk.services import (
+    initialize_artifact_service,
+    initialize_credential_service,
+    initialize_memory_service,
+    initialize_session_service,
+)
+from ...agent.adk.setup import (
+    initialize_adk_agent,
+    initialize_adk_runner,
+    load_adk_tools,
+)
+from ...agent.protocol.event_handlers import process_event, publish_agent_card
+from ...agent.tools.peer_agent_tool import (
+    CORRELATION_DATA_PREFIX,
+    PEER_TOOL_PREFIX,
+    PeerAgentTool,
+)
+from ...agent.tools.registry import tool_registry
+from ...agent.utils.config_parser import resolve_instruction_provider
+from ...common import a2a
+from ...common.a2a.translation import format_and_route_adk_event
+from ...common.agent_registry import AgentRegistry
+from ...common.constants import (
+    DEFAULT_COMMUNICATION_TIMEOUT,
+    HEALTH_CHECK_INTERVAL_SECONDS,
+    HEALTH_CHECK_TTL_SECONDS,
+)
+from ...common.data_parts import AgentProgressUpdateData
+from ...common.middleware.registry import MiddlewareRegistry
+from ...common.sac.sam_component_base import SamComponentBase
+from ...common.utils.rbac_utils import validate_agent_access
+
+log = logging.getLogger(__name__)
+
+if TYPE_CHECKING:
+    from .app import AgentInitCleanupConfig
+    from .task_execution_context import TaskExecutionContext
+
+info = {
+    "class_name": "SamAgentComponent",
+    "description": (
+        "Hosts a Google ADK agent and bridges communication via the A2A protocol over Solace. "
+        "NOTE: Configuration is defined in the app-level 'app_config' block "
+        "and validated by 'SamAgentApp.app_schema' when using the associated App class."
+    ),
+    "config_parameters": [],
+    "input_schema": {
+        "type": "object",
+        "description": "Not typically used; component reacts to events.",
+        "properties": {},
+    },
+    "output_schema": {
+        "type": "object",
+        "description": "Not typically used; component publishes results to Solace.",
+        "properties": {},
+    },
+}
+InstructionProvider = Callable[[ReadonlyContext], str]
+
+
+class SamAgentComponent(SamComponentBase):
+    """
+    A Solace AI Connector component that hosts a Google ADK agent,
+    communicating via the A2A protocol over Solace.
+    """
+
+    CORRELATION_DATA_PREFIX = CORRELATION_DATA_PREFIX
+    HOST_COMPONENT_VERSION = "1.0.0-alpha"
+    HEALTH_CHECK_TIMER_ID = "agent_health_check"
+
+    def __init__(self, **kwargs):
+        """
+        Initializes the A2A_ADK_HostComponent.
+        Args:
+            **kwargs: Configuration parameters passed from the SAC framework.
+                      Expects configuration under app_config.
+        """
+        if "component_config" in kwargs and "app_config" in kwargs["component_config"]:
+            name = kwargs["component_config"]["app_config"].get("agent_name")
+            if name:
+                kwargs.setdefault("name", name)
+
+        super().__init__(info, **kwargs)
+        self.agent_name = self.get_config("agent_name")
+        log.info(
+            "%s Initializing agent: %s (A2A ADK Host Component)...",
+            self.log_identifier,
+            self.agent_name,
+        )
+
+        # Initialize the agent registry for health tracking
+        self.agent_registry = AgentRegistry()
+        try:
+            self.namespace = self.get_config("namespace")
+            if not self.namespace:
+                raise ValueError("Internal Error: Namespace missing after validation.")
+            self.supports_streaming = self.get_config("supports_streaming", False)
+            self.stream_batching_threshold_bytes = self.get_config(
+                "stream_batching_threshold_bytes", 0
+            )
+            self.agent_name = self.get_config("agent_name")
+            if not self.agent_name:
+                raise ValueError("Internal Error: Agent name missing after validation.")
+            self.model_config = self.get_config("model")
+            if not self.model_config:
+                raise ValueError(
+                    "Internal Error: Model config missing after validation."
+                )
+            self.instruction_config = self.get_config("instruction", "")
+            self.global_instruction_config = self.get_config("global_instruction", "")
+            self.tools_config = self.get_config("tools", [])
+            self.planner_config = self.get_config("planner")
+            self.code_executor_config = self.get_config("code_executor")
+            self.session_service_config = self.get_config("session_service")
+            if not self.session_service_config:
+                raise ValueError(
+                    "Internal Error: Session service config missing after validation."
+                )
+            self.default_session_behavior = self.session_service_config.get(
+                "default_behavior", "PERSISTENT"
+            ).upper()
+            if self.default_session_behavior not in ["PERSISTENT", "RUN_BASED"]:
+                log.warning(
+                    "%s Invalid 'default_behavior' in session_service_config: '%s'. Defaulting to PERSISTENT.",
+                    self.log_identifier,
+                    self.default_session_behavior,
+                )
+                self.default_session_behavior = "PERSISTENT"
+            log.info(
+                "%s Default session behavior set to: %s",
+                self.log_identifier,
+                self.default_session_behavior,
+            )
+            self.artifact_service_config = self.get_config(
+                "artifact_service", {"type": "memory"}
+            )
+            self.memory_service_config = self.get_config(
+                "memory_service", {"type": "memory"}
+            )
+            self.artifact_handling_mode = self.get_config(
+                "artifact_handling_mode", "ignore"
+            ).lower()
+            if self.artifact_handling_mode not in ["ignore", "embed", "reference"]:
+                log.warning(
+                    "%s Invalid artifact_handling_mode '%s'. Defaulting to 'ignore'.",
+                    self.log_identifier,
+                    self.artifact_handling_mode,
+                )
+                self.artifact_handling_mode = "ignore"
+            log.info(
+                "%s Artifact Handling Mode: %s",
+                self.log_identifier,
+                self.artifact_handling_mode,
+            )
+            if self.artifact_handling_mode == "reference":
+                log.warning(
+                    "%s Artifact handling mode 'reference' selected, but this component does not currently host an endpoint to serve artifacts. Clients may not be able to retrieve referenced artifacts.",
+                    self.log_identifier,
+                )
+            self.agent_card_config = self.get_config("agent_card")
+            if not self.agent_card_config:
+                raise ValueError(
+                    "Internal Error: Agent card config missing after validation."
+                )
+            self.agent_card_publishing_config = self.get_config("agent_card_publishing")
+            if not self.agent_card_publishing_config:
+                raise ValueError(
+                    "Internal Error: Agent card publishing config missing after validation."
+                )
+            self.agent_discovery_config = self.get_config("agent_discovery")
+            if not self.agent_discovery_config:
+                raise ValueError(
+                    "Internal Error: Agent discovery config missing after validation."
+                )
+            self.inter_agent_communication_config = self.get_config(
+                "inter_agent_communication"
+            )
+            if not self.inter_agent_communication_config:
+                raise ValueError(
+                    "Internal Error: Inter-agent comms config missing after validation."
+                )
+
+            self.max_message_size_bytes = self.get_config(
+                "max_message_size_bytes", 10_000_000
+            )
+
+        except Exception as e:
+            log.error(
+                "%s Failed to retrieve configuration via get_config: %s",
+                self.log_identifier,
+                e,
+            )
+            raise ValueError(f"Configuration retrieval error: {e}") from e
+        self.session_service: BaseSessionService = None
+        self.artifact_service: BaseArtifactService = None
+        self.memory_service: BaseMemoryService = None
+        self.credential_service: Optional[BaseCredentialService] = None
+        self.adk_agent: LlmAgent = None
+        self.runner: Runner = None
+        self.agent_card_tool_manifest: List[Dict[str, Any]] = []
+        self.peer_agents: Dict[str, Any] = {}  # Keep for backward compatibility
+        self._card_publish_timer_id: str = f"publish_card_{self.agent_name}"
+        self._async_init_future = None
+        self.peer_response_queues: Dict[str, asyncio.Queue] = {}
+        self.peer_response_queue_lock = threading.Lock()
+        self.agent_specific_state: Dict[str, Any] = {}
+        self.active_tasks: Dict[str, "TaskExecutionContext"] = {}
+        self.active_tasks_lock = threading.Lock()
+        self._tool_cleanup_hooks: List[Callable] = []
+        self._agent_system_instruction_string: Optional[str] = None
+        self._agent_system_instruction_callback: Optional[
+            Callable[[CallbackContext, LlmRequest], Optional[str]]
+        ] = None
+        self._active_background_tasks = set()
+        try:
+            self.agent_specific_state: Dict[str, Any] = {}
+            init_func_details = self.get_config("agent_init_function")
+
+            try:
+                log.info(
+                    "%s Initializing synchronous ADK services...", self.log_identifier
+                )
+                self.session_service = initialize_session_service(self)
+                self.artifact_service = initialize_artifact_service(self)
+                self.memory_service = initialize_memory_service(self)
+                self.credential_service = initialize_credential_service(self)
+
+                log.info(
+                    "%s Initialized Synchronous ADK services.", self.log_identifier
+                )
+            except Exception as service_err:
+                log.exception(
+                    "%s Failed to initialize synchronous ADK services: %s",
+                    self.log_identifier,
+                    service_err,
+                )
+                raise RuntimeError(
+                    f"Failed to initialize synchronous ADK services: {service_err}"
+                ) from service_err
+
+            # initialize enterprise features if available
+            try:
+                from solace_agent_mesh_enterprise.init_enterprise_component import (
+                    init_enterprise_component_features,
+                )
+
+                init_enterprise_component_features(self)
+            except ImportError:
+                # Community edition
+                # Contact Solace support for enterprise features
+                pass
+
+            from .app import (
+                AgentInitCleanupConfig,
+            )  # delayed import to avoid circular dependency
+
+            if init_func_details and isinstance(
+                init_func_details, AgentInitCleanupConfig
+            ):
+                module_name = init_func_details.get("module")
+                func_name = init_func_details.get("name")
+                base_path = init_func_details.get("base_path")
+                specific_init_params_dict = init_func_details.get("config", {})
+                if module_name and func_name:
+                    log.info(
+                        "%s Attempting to load init_function: %s.%s",
+                        self.log_identifier,
+                        module_name,
+                        func_name,
+                    )
+                    try:
+                        module = import_module(module_name, base_path=base_path)
+                        init_function = getattr(module, func_name)
+                        if not callable(init_function):
+                            raise TypeError(
+                                f"Init function '{func_name}' in module '{module_name}' is not callable."
+                            )
+                        sig = inspect.signature(init_function)
+                        pydantic_config_model = None
+                        config_param_name = None
+                        validated_config_arg = specific_init_params_dict
+                        for param_name_sig, param_sig in sig.parameters.items():
+                            if (
+                                param_sig.annotation is not inspect.Parameter.empty
+                                and isinstance(param_sig.annotation, type)
+                                and issubclass(param_sig.annotation, BaseModel)
+                            ):
+                                pydantic_config_model = param_sig.annotation
+                                config_param_name = param_name_sig
+                                break
+                        if pydantic_config_model and config_param_name:
+                            log.info(
+                                "%s Found Pydantic config model '%s' for init_function parameter '%s'.",
+                                self.log_identifier,
+                                pydantic_config_model.__name__,
+                                config_param_name,
+                            )
+                            try:
+                                validated_config_arg = pydantic_config_model(
+                                    **specific_init_params_dict
+                                )
+                            except ValidationError as ve:
+                                log.error(
+                                    "%s Validation error for init_function config using Pydantic model '%s': %s",
+                                    self.log_identifier,
+                                    pydantic_config_model.__name__,
+                                    ve,
+                                )
+                                raise ValueError(
+                                    f"Invalid configuration for init_function '{func_name}': {ve}"
+                                ) from ve
+                        elif (
+                            config_param_name
+                            and param_sig.annotation is not inspect.Parameter.empty
+                        ):
+                            log.warning(
+                                "%s Config parameter '%s' for init_function '%s' has a type hint '%s', but it's not a Pydantic BaseModel. Passing raw dict.",
+                                self.log_identifier,
+                                config_param_name,
+                                func_name,
+                                param_sig.annotation,
+                            )
+                        else:
+                            log.info(
+                                "%s No Pydantic model type hint found for a config parameter of init_function '%s'. Passing raw dict if a config param exists, or only host_component.",
+                                self.log_identifier,
+                                func_name,
+                            )
+                        func_params_list = list(sig.parameters.values())
+                        num_actual_params = len(func_params_list)
+                        if num_actual_params == 1:
+                            if specific_init_params_dict:
|
|
382
|
+
log.warning(
|
|
383
|
+
"%s Init function '%s' takes 1 argument, but 'config' was provided in YAML. Config will be ignored.",
|
|
384
|
+
self.log_identifier,
|
|
385
|
+
func_name,
|
|
386
|
+
)
|
|
387
|
+
init_function(self)
|
|
388
|
+
elif num_actual_params == 2:
|
|
389
|
+
actual_config_param_name_in_signature = func_params_list[
|
|
390
|
+
1
|
|
391
|
+
].name
|
|
392
|
+
init_function(
|
|
393
|
+
self,
|
|
394
|
+
**{
|
|
395
|
+
actual_config_param_name_in_signature: validated_config_arg
|
|
396
|
+
},
|
|
397
|
+
)
|
|
398
|
+
else:
|
|
399
|
+
raise TypeError(
|
|
400
|
+
f"Init function '{func_name}' has an unsupported signature. "
|
|
401
|
+
f"Expected (host_component_instance) or (host_component_instance, config_param), "
|
|
402
|
+
f"but got {num_actual_params} parameters."
|
|
403
|
+
)
|
|
404
|
+
log.info(
|
|
405
|
+
"%s Successfully executed init_function: %s.%s",
|
|
406
|
+
self.log_identifier,
|
|
407
|
+
module_name,
|
|
408
|
+
func_name,
|
|
409
|
+
)
|
|
410
|
+
except Exception as e:
|
|
411
|
+
log.exception(
|
|
412
|
+
"%s Fatal error during agent initialization via init_function '%s.%s': %s",
|
|
413
|
+
self.log_identifier,
|
|
414
|
+
module_name,
|
|
415
|
+
func_name,
|
|
416
|
+
e,
|
|
417
|
+
)
|
|
418
|
+
raise RuntimeError(
|
|
419
|
+
f"Agent custom initialization failed: {e}"
|
|
420
|
+
) from e
|
|
421
|
+
|
|
422
|
+
# Async init is now handled by the base class `run` method.
|
|
423
|
+
# We still need a future to signal completion from the async thread.
|
|
424
|
+
self._async_init_future = concurrent.futures.Future()
|
|
425
|
+
|
|
426
|
+
# Set up health check timer if enabled
|
|
427
|
+
health_check_interval_seconds = self.agent_discovery_config.get(
|
|
428
|
+
"health_check_interval_seconds", HEALTH_CHECK_INTERVAL_SECONDS
|
|
429
|
+
)
|
|
430
|
+
if health_check_interval_seconds > 0:
|
|
431
|
+
log.info(
|
|
432
|
+
"%s Scheduling agent health check every %d seconds.",
|
|
433
|
+
self.log_identifier,
|
|
434
|
+
health_check_interval_seconds,
|
|
435
|
+
)
|
|
436
|
+
self.add_timer(
|
|
437
|
+
delay_ms=health_check_interval_seconds * 1000,
|
|
438
|
+
timer_id=self.HEALTH_CHECK_TIMER_ID,
|
|
439
|
+
interval_ms=health_check_interval_seconds * 1000,
|
|
440
|
+
callback=lambda timer_data: self._check_agent_health(),
|
|
441
|
+
)
|
|
442
|
+
else:
|
|
443
|
+
log.warning(
|
|
444
|
+
"%s Agent health check interval not configured or invalid, health checks will not run periodically.",
|
|
445
|
+
self.log_identifier,
|
|
446
|
+
)
|
|
447
|
+
|
|
448
|
+
log.info(
|
|
449
|
+
"%s Initialized agent: %s",
|
|
450
|
+
self.log_identifier,
|
|
451
|
+
self.agent_name,
|
|
452
|
+
)
|
|
453
|
+
except Exception as e:
|
|
454
|
+
log.exception("%s Initialization failed: %s", self.log_identifier, e)
|
|
455
|
+
raise
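
    # ------------------------------------------------------------------
    # Illustrative sketch (not part of this module): a minimal custom
    # `agent_init_function` of the shape the loader above accepts. The
    # loader inspects the signature; a second parameter annotated with a
    # Pydantic BaseModel receives the YAML `config` block validated into
    # that model, otherwise the raw dict is passed. The names
    # `MyInitConfig` and `my_agent_init` are hypothetical.
    #
    #     from pydantic import BaseModel
    #
    #     class MyInitConfig(BaseModel):
    #         api_base_url: str
    #         timeout_seconds: int = 30
    #
    #     def my_agent_init(host_component, init_config: MyInitConfig):
    #         # Stash per-agent state for tools to read later.
    #         host_component.agent_specific_state["api_base_url"] = (
    #             init_config.api_base_url
    #         )
    # ------------------------------------------------------------------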
    def _get_component_id(self) -> str:
        """Returns the agent name as the component identifier."""
        return self.agent_name

    def _get_component_type(self) -> str:
        """Returns 'agent' as the component type."""
        return "agent"

    def invoke(self, message: SolaceMessage, data: dict) -> dict:
        """Placeholder invoke method. Primary logic resides in _handle_message."""
        log.warning(
            "%s 'invoke' method called, but primary logic resides in '_handle_message'. This should not happen in normal operation.",
            self.log_identifier,
        )
        return None

    async def _handle_message_async(self, message: SolaceMessage, topic: str) -> None:
        """
        Async handler for incoming messages.

        Routes the message to the async event handler.

        Args:
            message: The Solace message
            topic: The topic the message was received on
        """
        # Create event and process asynchronously
        event = Event(EventType.MESSAGE, message)
        await process_event(self, event)

    def handle_timer_event(self, timer_data: Dict[str, Any]):
        """Handles timer events for agent card publishing and health checks."""
        log.debug("%s Received timer event: %s", self.log_identifier, timer_data)
        timer_id = timer_data.get("timer_id")

        if timer_id == self._card_publish_timer_id:
            publish_agent_card(self)
        elif timer_id == self.HEALTH_CHECK_TIMER_ID:
            self._check_agent_health()

    async def handle_cache_expiry_event(self, cache_data: Dict[str, Any]):
        """
        Handles cache expiry events for peer timeouts by calling the atomic claim helper.
        """
        log.debug("%s Received cache expiry event: %s", self.log_identifier, cache_data)
        sub_task_id = cache_data.get("key")
        logical_task_id = cache_data.get("expired_data")

        if not (
            sub_task_id
            and sub_task_id.startswith(CORRELATION_DATA_PREFIX)
            and logical_task_id
        ):
            log.debug(
                "%s Cache expiry for key '%s' is not a peer sub-task timeout or is missing data.",
                self.log_identifier,
                sub_task_id,
            )
            return

        correlation_data = await self._claim_peer_sub_task_completion(
            sub_task_id=sub_task_id, logical_task_id_from_event=logical_task_id
        )

        if correlation_data:
            log.warning(
                "%s Detected timeout for sub-task %s (Main Task: %s). Claimed successfully.",
                self.log_identifier,
                sub_task_id,
                logical_task_id,
            )
            await self._handle_peer_timeout(sub_task_id, correlation_data)
        else:
            log.info(
                "%s Ignoring timeout event for sub-task %s as it was already completed.",
                self.log_identifier,
                sub_task_id,
            )

    async def get_main_task_context(
        self, logical_task_id: str
    ) -> Optional["TaskExecutionContext"]:
        """
        Retrieves the main task context for a given logical task ID.

        This method is used when the current agent is the target agent for the task.
        It returns the TaskExecutionContext which contains the full task state including
        a2a_context, active_peer_sub_tasks, and other task execution details.

        Args:
            logical_task_id: The unique logical ID of the task

        Returns:
            The TaskExecutionContext if the task is active, None otherwise

        Raises:
            ValueError: If logical_task_id is None or empty
        """
        if not logical_task_id:
            raise ValueError("logical_task_id cannot be None or empty")

        with self.active_tasks_lock:
            active_task_context = self.active_tasks.get(logical_task_id)
            if active_task_context is None:
                log.warning(
                    f"No active task context found for logical_task_id: {logical_task_id}"
                )
                return None

            return active_task_context
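
    # Illustrative sketch (not part of this module): a caller holding a
    # reference to the host component can look up the live task state by
    # its logical ID; `host_component` and `logical_task_id` here are
    # hypothetical variables.
    #
    #     ctx = await host_component.get_main_task_context(logical_task_id)
    #     if ctx is not None:
    #         # Full task state (a2a_context, active_peer_sub_tasks, ...)
    #         # is available on the returned TaskExecutionContext.
    #         sub_tasks = ctx.active_peer_sub_tasks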
    async def get_all_sub_task_correlation_data_from_logical_task_id(
        self, logical_task_id: str
    ) -> list[dict[str, Any]]:
        """
        Retrieves correlation data for all active peer sub-tasks of a given logical task.

        This method is used when forwarding requests to other agents in an A2A workflow.
        It returns a list of correlation data dictionaries, each containing information
        about a peer sub-task including peer_task_id, peer_agent_name, and original_task_context.

        Args:
            logical_task_id: The unique logical ID of the parent task

        Returns:
            List of correlation data dictionaries for active peer sub-tasks.
            Returns empty list if no active peer sub-tasks exist.

        Raises:
            ValueError: If logical_task_id is None or empty
        """
        if not logical_task_id:
            raise ValueError("logical_task_id cannot be None or empty")

        with self.active_tasks_lock:
            active_task_context = self.active_tasks.get(logical_task_id)
            if active_task_context is None:
                log.warning(
                    f"No active task context found for logical_task_id: {logical_task_id}"
                )
                return []

            active_peer_sub_tasks = active_task_context.active_peer_sub_tasks
            if not active_peer_sub_tasks:
                log.debug(
                    f"No active peer sub-tasks found for logical_task_id: {logical_task_id}"
                )
                return []

            results = []
            for sub_task_id, correlation_data in active_peer_sub_tasks.items():
                if sub_task_id is not None and correlation_data is not None:
                    results.append(correlation_data)

            return results

    async def _get_correlation_data_for_sub_task(
        self, sub_task_id: str
    ) -> Optional[Dict[str, Any]]:
        """
        Non-destructively retrieves correlation data for a sub-task.
        Used for intermediate events where the sub-task should remain active.
        """
        logical_task_id = self.cache_service.get_data(sub_task_id)
        if not logical_task_id:
            log.warning(
                "%s No cache entry for sub-task %s. Cannot get correlation data.",
                self.log_identifier,
                sub_task_id,
            )
            return None

        with self.active_tasks_lock:
            task_context = self.active_tasks.get(logical_task_id)

        if not task_context:
            log.error(
                "%s TaskExecutionContext not found for task %s, but cache entry existed for sub-task %s. This may indicate a cleanup issue.",
                self.log_identifier,
                logical_task_id,
                sub_task_id,
            )
            return None

        with task_context.lock:
            return task_context.active_peer_sub_tasks.get(sub_task_id)

    async def _claim_peer_sub_task_completion(
        self, sub_task_id: str, logical_task_id_from_event: Optional[str] = None
    ) -> Optional[Dict[str, Any]]:
        """
        Atomically claims a sub-task as complete, preventing race conditions.
        This is a destructive operation that removes state.

        Args:
            sub_task_id: The ID of the sub-task to claim.
            logical_task_id_from_event: The parent task ID, if provided by the event (e.g., a timeout).
                If not provided, it will be looked up from the cache.
        """
        log_id = f"{self.log_identifier}[ClaimSubTask:{sub_task_id}]"
        logical_task_id = logical_task_id_from_event

        if not logical_task_id:
            logical_task_id = self.cache_service.get_data(sub_task_id)
            if not logical_task_id:
                log.warning(
                    "%s No cache entry found. Task has likely timed out and been cleaned up. Cannot claim.",
                    log_id,
                )
                return None

        with self.active_tasks_lock:
            task_context = self.active_tasks.get(logical_task_id)

        if not task_context:
            log.error(
                "%s TaskExecutionContext not found for task %s. Cleaning up stale cache entry.",
                log_id,
                logical_task_id,
            )
            self.cache_service.remove_data(sub_task_id)
            return None

        correlation_data = task_context.claim_sub_task_completion(sub_task_id)

        if correlation_data:
            # If we successfully claimed the task, remove the timeout tracker from the cache.
            self.cache_service.remove_data(sub_task_id)
            log.info("%s Successfully claimed completion.", log_id)
            return correlation_data
        else:
            # This means the task was already claimed by a competing event (e.g., timeout vs. response).
            log.warning("%s Failed to claim; it was already completed.", log_id)
            return None
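
    # Illustrative sketch (not part of this module): the peer response
    # handler and the timeout handler both race through this claim
    # helper, so exactly one caller ever processes a given sub-task
    # (`component` is a hypothetical reference to this host component).
    #
    #     correlation_data = await component._claim_peer_sub_task_completion(
    #         sub_task_id
    #     )
    #     if correlation_data is None:
    #         return  # the competing event (response vs. timeout) won the race
    #     # ... safe to finalize the sub-task exactly once here ...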
    async def reset_peer_timeout(self, sub_task_id: str):
        """
        Resets the timeout for a given peer sub-task.
        """
        log_id = f"{self.log_identifier}[ResetTimeout:{sub_task_id}]"
        log.debug("%s Resetting timeout for peer sub-task.", log_id)

        # Get the original logical task ID from the cache without removing it
        logical_task_id = self.cache_service.get_data(sub_task_id)
        if not logical_task_id:
            log.warning(
                "%s No active task found for sub-task %s. Cannot reset timeout.",
                log_id,
                sub_task_id,
            )
            return

        # Get the configured timeout
        timeout_sec = self.inter_agent_communication_config.get(
            "request_timeout_seconds", DEFAULT_COMMUNICATION_TIMEOUT
        )

        # Update the cache with a new expiry
        self.cache_service.add_data(
            key=sub_task_id,
            value=logical_task_id,
            expiry=timeout_sec,
            component=self,
        )
        log.info(
            "%s Timeout for sub-task %s has been reset to %d seconds.",
            log_id,
            sub_task_id,
            timeout_sec,
        )
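
    # Illustrative sketch (not part of this module): the peer timeout is
    # simply a cache entry whose expiry fires handle_cache_expiry_event,
    # so "resetting" it means re-adding the same key/value with a fresh
    # expiry, as the method above does:
    #
    #     component.cache_service.add_data(
    #         key=sub_task_id,        # delivered as cache_data["key"] on expiry
    #         value=logical_task_id,  # delivered as cache_data["expired_data"]
    #         expiry=timeout_sec,
    #         component=component,
    #     )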
    async def _retrigger_agent_with_peer_responses(
        self,
        results_to_inject: list,
        correlation_data: dict,
        task_context: "TaskExecutionContext",
    ):
        """
        Injects peer tool responses into the session history and re-triggers the ADK runner.
        This function contains the logic to correctly merge parallel tool call responses.
        """
        original_task_context = correlation_data.get("original_task_context")
        logical_task_id = correlation_data.get("logical_task_id")
        paused_invocation_id = correlation_data.get("invocation_id")
        log_retrigger = f"{self.log_identifier}[RetriggerManager:{logical_task_id}]"

        # Clear paused state - task is resuming now
        task_context.set_paused(False)
        log.debug(
            "%s Task %s resuming from paused state with peer responses.",
            log_retrigger,
            logical_task_id,
        )

        try:
            effective_session_id = original_task_context.get("effective_session_id")
            user_id = original_task_context.get("user_id")
            session = await self.session_service.get_session(
                app_name=self.agent_name,
                user_id=user_id,
                session_id=effective_session_id,
            )
            if not session:
                raise RuntimeError(
                    f"Could not find ADK session '{effective_session_id}'"
                )

            new_response_parts = []
            for result in results_to_inject:
                part = adk_types.Part.from_function_response(
                    name=result["peer_tool_name"],
                    response=result["payload"],
                )
                part.function_response.id = result["adk_function_call_id"]
                new_response_parts.append(part)

            # Always create a new event for the incoming peer responses.
            # The ADK's `contents` processor is responsible for merging multiple
            # tool responses into a single message before the next LLM call.
            log.info(
                "%s Creating a new tool response event for %d peer responses.",
                log_retrigger,
                len(new_response_parts),
            )
            new_tool_response_content = adk_types.Content(
                role="tool", parts=new_response_parts
            )

            # Always use SSE streaming mode for the ADK runner, even on re-trigger.
            # This ensures that real-time callbacks for status updates and artifact
            # creation can function correctly for all turns of a task.
            streaming_mode = StreamingMode.SSE
            max_llm_calls = self.get_config("max_llm_calls_per_task", 20)
            run_config = RunConfig(
                streaming_mode=streaming_mode, max_llm_calls=max_llm_calls
            )

            log.info(
                "%s Re-triggering ADK runner for main task %s.",
                log_retrigger,
                logical_task_id,
            )
            try:
                await run_adk_async_task_thread_wrapper(
                    self,
                    session,
                    new_tool_response_content,
                    run_config,
                    original_task_context,
                    append_context_event=False,
                )
            finally:
                log.info(
                    "%s Cleaning up parallel invocation state for invocation %s.",
                    log_retrigger,
                    paused_invocation_id,
                )
                task_context.clear_parallel_invocation_state(paused_invocation_id)

        except Exception as e:
            log.exception(
                "%s Failed to re-trigger ADK runner for task %s: %s",
                log_retrigger,
                logical_task_id,
                e,
            )
            if original_task_context:
                loop = self.get_async_loop()
                if loop and loop.is_running():
                    asyncio.run_coroutine_threadsafe(
                        self.finalize_task_error(e, original_task_context), loop
                    )
                else:
                    log.error(
                        "%s Async loop not available. Cannot schedule error finalization for task %s.",
                        log_retrigger,
                        logical_task_id,
                    )

    async def _handle_peer_timeout(
        self,
        sub_task_id: str,
        correlation_data: Dict[str, Any],
    ):
        """
        Handles the timeout of a peer agent task. It sends a cancellation request
        to the peer, updates the local completion counter, and potentially
        re-triggers the runner if all parallel tasks are now complete.
        """
        logical_task_id = correlation_data.get("logical_task_id")
        invocation_id = correlation_data.get("invocation_id")
        log_retrigger = f"{self.log_identifier}[RetriggerManager:{logical_task_id}]"

        log.warning(
            "%s Peer request timed out for sub-task: %s (Invocation: %s)",
            log_retrigger,
            sub_task_id,
            invocation_id,
        )

        # Proactively send a cancellation request to the peer agent.
        peer_agent_name = correlation_data.get("peer_agent_name")
        if peer_agent_name:
            try:
                log.info(
                    "%s Sending CancelTaskRequest to peer '%s' for timed-out sub-task %s.",
                    log_retrigger,
                    peer_agent_name,
                    sub_task_id,
                )
                task_id_for_peer = sub_task_id.replace(CORRELATION_DATA_PREFIX, "", 1)
                cancel_request = a2a.create_cancel_task_request(
                    task_id=task_id_for_peer
                )
                user_props = {"clientId": self.agent_name}
                peer_topic = self._get_agent_request_topic(peer_agent_name)
                self.publish_a2a_message(
                    payload=cancel_request.model_dump(exclude_none=True),
                    topic=peer_topic,
                    user_properties=user_props,
                )
            except Exception as e:
                log.error(
                    "%s Failed to send CancelTaskRequest to peer '%s' for sub-task %s: %s",
                    log_retrigger,
                    peer_agent_name,
                    sub_task_id,
                    e,
                )

        # Process the timeout locally.
        with self.active_tasks_lock:
            task_context = self.active_tasks.get(logical_task_id)

        if not task_context:
            log.warning(
                "%s TaskExecutionContext not found for task %s. Ignoring timeout event.",
                log_retrigger,
                logical_task_id,
            )
            return

        timeout_value = self.inter_agent_communication_config.get(
            "request_timeout_seconds", DEFAULT_COMMUNICATION_TIMEOUT
        )
        all_sub_tasks_completed = task_context.handle_peer_timeout(
            sub_task_id, correlation_data, timeout_value, invocation_id
        )

        if not all_sub_tasks_completed:
            log.info(
                "%s Waiting for more peer responses for invocation %s after timeout of sub-task %s.",
                log_retrigger,
                invocation_id,
                sub_task_id,
            )
            return

        log.info(
            "%s All peer responses/timeouts received for invocation %s. Retriggering agent.",
            log_retrigger,
            invocation_id,
        )
        results_to_inject = task_context.parallel_tool_calls[invocation_id].get(
            "results", []
        )

        await self._retrigger_agent_with_peer_responses(
            results_to_inject, correlation_data, task_context
        )
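
    # Illustrative sketch (not part of this module): the sub-task ID used
    # on the wire is the cache key with the correlation prefix stripped,
    # so a timed-out delegation is cancelled with the same helpers used
    # above (standalone, hypothetical snippet):
    #
    #     task_id_for_peer = sub_task_id.replace(CORRELATION_DATA_PREFIX, "", 1)
    #     cancel_request = a2a.create_cancel_task_request(task_id=task_id_for_peer)
    #     component.publish_a2a_message(
    #         payload=cancel_request.model_dump(exclude_none=True),
    #         topic=component._get_agent_request_topic(peer_agent_name),
    #         user_properties={"clientId": component.agent_name},
    #     )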
    def _inject_peer_tools_callback(
        self, callback_context: CallbackContext, llm_request: LlmRequest
    ) -> Optional[LlmResponse]:
        """
        ADK before_model_callback to dynamically add PeerAgentTools to the LLM request
        and generate the corresponding instruction text for the LLM.
        """
        log.debug("%s Running _inject_peer_tools_callback...", self.log_identifier)
        if not self.peer_agents:
            log.debug("%s No peer agents currently discovered.", self.log_identifier)
            return None

        a2a_context = callback_context.state.get("a2a_context", {})
        user_config = (
            a2a_context.get("a2a_user_config", {})
            if isinstance(a2a_context, dict)
            else {}
        )

        inter_agent_config = self.get_config("inter_agent_communication", {})
        allow_list = inter_agent_config.get("allow_list", ["*"])
        deny_list = set(self.get_config("deny_list", []))
        self_name = self.get_config("agent_name")

        peer_tools_to_add = []
        allowed_peer_descriptions = []

        # Sort peer agents alphabetically to ensure consistent tool ordering for prompt caching
        for peer_name, agent_card in sorted(self.peer_agents.items()):
            if not isinstance(agent_card, AgentCard) or peer_name == self_name:
                continue

            is_allowed = any(
                fnmatch.fnmatch(peer_name, p) for p in allow_list
            ) and not any(fnmatch.fnmatch(peer_name, p) for p in deny_list)

            if is_allowed:
                config_resolver = MiddlewareRegistry.get_config_resolver()
                operation_spec = {
                    "operation_type": "peer_delegation",
                    "target_agent": peer_name,
                    "delegation_context": "peer_discovery",
                }
                validation_context = {
                    "discovery_phase": "peer_enumeration",
                    "agent_context": {"component_type": "peer_discovery"},
                }
                validation_result = config_resolver.validate_operation_config(
                    user_config, operation_spec, validation_context
                )
                if not validation_result.get("valid", True):
                    log.debug(
                        "%s Peer agent '%s' filtered out by user configuration.",
                        self.log_identifier,
                        peer_name,
                    )
                    is_allowed = False

            if not is_allowed:
                continue

            try:
                peer_tool_instance = PeerAgentTool(
                    target_agent_name=peer_name, host_component=self
                )
                if peer_tool_instance.name not in llm_request.tools_dict:
                    peer_tools_to_add.append(peer_tool_instance)
                    # Get enhanced description from the tool instance
                    # which includes capabilities, skills, and tools
                    enhanced_desc = peer_tool_instance._build_enhanced_description(
                        agent_card
                    )
                    allowed_peer_descriptions.append(
                        f"\n### `peer_{peer_name}`\n{enhanced_desc}"
                    )
            except Exception as e:
                log.error(
                    "%s Failed to create PeerAgentTool for '%s': %s",
                    self.log_identifier,
                    peer_name,
                    e,
                )

        if allowed_peer_descriptions:
            peer_list_str = "\n".join(allowed_peer_descriptions)
            instruction_text = (
                "## Peer Agent Delegation\n\n"
                "You can delegate tasks to other specialized agents if they are better suited.\n\n"
                "**How to delegate:**\n"
                "- Use the `peer_<agent_name>(task_description: str)` tool for delegation\n"
                "- Replace `<agent_name>` with the actual name of the target agent\n"
                "- Provide a clear and detailed `task_description` for the peer agent\n"
                "- **Important:** The peer agent does not have access to your session history, "
                "so you must provide all required context necessary to fulfill the request\n\n"
                "## Available Peer Agents\n"
                f"{peer_list_str}"
            )
            callback_context.state["peer_tool_instructions"] = instruction_text
            log.debug(
                "%s Stored peer tool instructions in callback_context.state.",
                self.log_identifier,
            )

        if peer_tools_to_add:
            try:
                if llm_request.config.tools is None:
                    llm_request.config.tools = []
                if len(llm_request.config.tools) > 0:
                    for tool in peer_tools_to_add:
                        llm_request.tools_dict[tool.name] = tool
                        llm_request.config.tools[0].function_declarations.append(
                            tool._get_declaration()
                        )
                else:
                    llm_request.append_tools(peer_tools_to_add)
                log.debug(
                    "%s Dynamically added %d PeerAgentTool(s) to LLM request.",
                    self.log_identifier,
                    len(peer_tools_to_add),
                )
            except Exception as e:
                log.error(
                    "%s Failed to append dynamic peer tools to LLM request: %s",
                    self.log_identifier,
                    e,
                )
        return None
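
    # Illustrative sketch (not part of this module): the allow/deny check
    # above is plain fnmatch globbing, so a config such as
    # allow_list: ["billing-*"] with deny_list: ["billing-internal"]
    # resolves like this (hypothetical standalone snippet):
    #
    #     import fnmatch
    #
    #     allow_list = ["billing-*"]
    #     deny_list = {"billing-internal"}
    #
    #     def peer_is_allowed(peer_name: str) -> bool:
    #         return any(
    #             fnmatch.fnmatch(peer_name, p) for p in allow_list
    #         ) and not any(fnmatch.fnmatch(peer_name, p) for p in deny_list)
    #
    #     assert peer_is_allowed("billing-eu")
    #     assert not peer_is_allowed("billing-internal")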
    def _filter_tools_by_capability_callback(
        self, callback_context: CallbackContext, llm_request: LlmRequest
    ) -> Optional[LlmResponse]:
        """
        ADK before_model_callback to filter tools in the LlmRequest based on user configuration.
        This callback modifies `llm_request.config.tools` in place by potentially
        removing individual FunctionDeclarations from genai.Tool objects or removing
        entire genai.Tool objects if all their declarations are filtered out.
        """
        log_id_prefix = f"{self.log_identifier}[ToolCapabilityFilter]"
        log.debug("%s Running _filter_tools_by_capability_callback...", log_id_prefix)

        a2a_context = callback_context.state.get("a2a_context", {})
        if not isinstance(a2a_context, dict):
            log.warning(
                "%s 'a2a_context' in session state is not a dictionary. Using empty configuration.",
                log_id_prefix,
            )
            a2a_context = {}
        user_config = a2a_context.get("a2a_user_config", {})
        if not isinstance(user_config, dict):
            log.warning(
                "%s 'a2a_user_config' in a2a_context is not a dictionary. Using empty configuration.",
                log_id_prefix,
            )
            user_config = {}

        log.debug(
            "%s User configuration for filtering: %s",
            log_id_prefix,
            {k: v for k, v in user_config.items() if not k.startswith("_")},
        )

        config_resolver = MiddlewareRegistry.get_config_resolver()

        if not llm_request.config or not llm_request.config.tools:
            log.debug("%s No tools in request to filter.", log_id_prefix)
            return None

        explicit_tools_config = self.get_config("tools", [])
        final_filtered_genai_tools: List[adk_types.Tool] = []
        original_genai_tools_count = len(llm_request.config.tools)
        original_function_declarations_count = 0

        for original_tool in llm_request.config.tools:
            if not original_tool.function_declarations:
                log.warning(
                    "%s genai.Tool object has no function declarations. Keeping it.",
                    log_id_prefix,
                )
                final_filtered_genai_tools.append(original_tool)
                continue

            original_function_declarations_count += len(
                original_tool.function_declarations
            )
            permitted_declarations_for_this_tool: List[
                adk_types.FunctionDeclaration
            ] = []

            for func_decl in original_tool.function_declarations:
                func_decl_name = func_decl.name
                tool_object = llm_request.tools_dict.get(func_decl_name)
                origin = SamAgentComponent._extract_tool_origin(tool_object)

                feature_descriptor = {
                    "feature_type": "tool_function",
                    "function_name": func_decl_name,
                    "tool_source": origin,
                    "tool_metadata": {"function_name": func_decl_name},
                }

                if origin == "peer_agent":
                    peer_name = func_decl_name.replace(PEER_TOOL_PREFIX, "", 1)
                    feature_descriptor["tool_metadata"]["peer_agent_name"] = peer_name
                elif origin == "builtin":
                    tool_def = tool_registry.get_tool_by_name(func_decl_name)
                    if tool_def:
                        feature_descriptor["tool_metadata"][
                            "tool_category"
                        ] = tool_def.category
                        feature_descriptor["tool_metadata"][
                            "required_scopes"
                        ] = tool_def.required_scopes
                elif origin in ["python", "mcp", "adk_builtin"]:
                    # Find the explicit config for this tool to pass to the resolver
                    for tool_cfg in explicit_tools_config:
                        cfg_tool_type = tool_cfg.get("tool_type")
                        cfg_tool_name = tool_cfg.get("tool_name")
                        cfg_func_name = tool_cfg.get("function_name")
                        if (
                            cfg_tool_type == "python"
                            and cfg_func_name == func_decl_name
                        ) or (
                            cfg_tool_type in ["builtin", "mcp"]
                            and cfg_tool_name == func_decl_name
                        ):
                            feature_descriptor["tool_metadata"][
                                "tool_config"
                            ] = tool_cfg
                            break

                context = {
                    "agent_context": self.get_agent_context(),
                    "filter_phase": "pre_llm",
                    "tool_configurations": {
                        "explicit_tools": explicit_tools_config,
                    },
                }

                if config_resolver.is_feature_enabled(
                    user_config, feature_descriptor, context
                ):
                    permitted_declarations_for_this_tool.append(func_decl)
                    log.debug(
                        "%s FunctionDeclaration '%s' (Source: %s) permitted.",
                        log_id_prefix,
                        func_decl_name,
                        origin,
                    )
                else:
                    log.info(
                        "%s FunctionDeclaration '%s' (Source: %s) FILTERED OUT due to configuration restrictions.",
                        log_id_prefix,
                        func_decl_name,
                        origin,
                    )

            if permitted_declarations_for_this_tool:
                scoped_tool = original_tool.model_copy(deep=True)
                scoped_tool.function_declarations = permitted_declarations_for_this_tool

                final_filtered_genai_tools.append(scoped_tool)
                log.debug(
                    "%s Keeping genai.Tool as it has %d permitted FunctionDeclaration(s).",
                    log_id_prefix,
                    len(permitted_declarations_for_this_tool),
                )
            else:
                log.info(
                    "%s Entire genai.Tool (original declarations: %s) FILTERED OUT as all its FunctionDeclarations were denied by configuration.",
                    log_id_prefix,
                    [fd.name for fd in original_tool.function_declarations],
                )

        final_function_declarations_count = sum(
            len(t.function_declarations)
            for t in final_filtered_genai_tools
            if t.function_declarations
        )

        if final_function_declarations_count != original_function_declarations_count:
            log.info(
                "%s Tool list modified by capability filter. Original genai.Tools: %d (Total Declarations: %d). Filtered genai.Tools: %d (Total Declarations: %d).",
                log_id_prefix,
                original_genai_tools_count,
                original_function_declarations_count,
                len(final_filtered_genai_tools),
                final_function_declarations_count,
            )
            llm_request.config.tools = (
                final_filtered_genai_tools if final_filtered_genai_tools else None
            )
        else:
            log.debug(
                "%s Tool list and FunctionDeclarations unchanged after capability filtering.",
                log_id_prefix,
            )

        return None
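
    # Illustrative sketch (not part of this module): each function
    # declaration is reduced to a feature descriptor like the one below
    # before the middleware resolver's is_feature_enabled() decides
    # whether to keep it (values hypothetical):
    #
    #     feature_descriptor = {
    #         "feature_type": "tool_function",
    #         "function_name": "peer_billing_agent",
    #         "tool_source": "peer_agent",
    #         "tool_metadata": {
    #             "function_name": "peer_billing_agent",
    #             "peer_agent_name": "billing_agent",
    #         },
    #     }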
    @staticmethod
    def _extract_tool_origin(tool) -> str:
        """
        Helper method to extract the origin of a tool from various possible attributes.
        """
        if hasattr(tool, "origin") and tool.origin is not None:
            return tool.origin
        elif (
            hasattr(tool, "func")
            and hasattr(tool.func, "origin")
            and tool.func.origin is not None
        ):
            return tool.func.origin
        else:
            return getattr(tool, "origin", "unknown")

    def get_agent_context(self) -> Dict[str, Any]:
        """Get agent context for middleware calls."""
        return {
            "agent_name": getattr(self, "agent_name", "unknown"),
            "component_type": "sac_agent",
        }

    def _inject_gateway_instructions_callback(
        self, callback_context: CallbackContext, llm_request: LlmRequest
    ) -> Optional[LlmResponse]:
        """
        ADK before_model_callback to dynamically prepend gateway-defined system_purpose
        and response_format to the agent's llm_request.config.system_instruction.
        """
        log_id_prefix = f"{self.log_identifier}[GatewayInstrInject]"
        log.debug(
            "%s Running _inject_gateway_instructions_callback to modify system_instruction...",
            log_id_prefix,
        )

        a2a_context = callback_context.state.get("a2a_context", {})
        if not isinstance(a2a_context, dict):
            log.warning(
                "%s 'a2a_context' in session state is not a dictionary. Skipping instruction injection.",
                log_id_prefix,
            )
            return None

        system_purpose = a2a_context.get("system_purpose")
        response_format = a2a_context.get("response_format")
        user_profile = a2a_context.get("a2a_user_config", {}).get("user_profile")

        inject_purpose = self.get_config("inject_system_purpose", False)
        inject_format = self.get_config("inject_response_format", False)
        inject_user_profile = self.get_config("inject_user_profile", False)

        gateway_instructions_to_add = []

        if (
            inject_purpose
            and system_purpose
            and isinstance(system_purpose, str)
            and system_purpose.strip()
        ):
            gateway_instructions_to_add.append(
                f"System Purpose:\n{system_purpose.strip()}"
            )
            log.debug(
                "%s Prepared system_purpose for system_instruction.", log_id_prefix
            )

        if (
            inject_format
            and response_format
            and isinstance(response_format, str)
            and response_format.strip()
        ):
            gateway_instructions_to_add.append(
                f"Desired Response Format:\n{response_format.strip()}"
            )
            log.debug(
                "%s Prepared response_format for system_instruction.", log_id_prefix
            )

        if (
            inject_user_profile
            and user_profile
            and (isinstance(user_profile, str) or isinstance(user_profile, dict))
        ):
            if isinstance(user_profile, dict):
                user_profile = json.dumps(user_profile, indent=2, default=str)
            gateway_instructions_to_add.append(
                f"Inquiring User Profile:\n{user_profile.strip()}\n"
            )
            log.debug("%s Prepared user_profile for system_instruction.", log_id_prefix)

        if not gateway_instructions_to_add:
            log.debug(
                "%s No gateway instructions to inject into system_instruction.",
                log_id_prefix,
            )
            return None

        if llm_request.config is None:
            log.warning(
                "%s llm_request.config is None, cannot append gateway instructions to system_instruction.",
                log_id_prefix,
            )
            return None

        if llm_request.config.system_instruction is None:
            llm_request.config.system_instruction = ""

        combined_new_instructions = "\n\n".join(gateway_instructions_to_add)

        if llm_request.config.system_instruction:
            llm_request.config.system_instruction += (
                f"\n\n---\n\n{combined_new_instructions}"
            )
        else:
            llm_request.config.system_instruction = combined_new_instructions

        log.info(
            "%s Injected %d gateway instruction block(s) into llm_request.config.system_instruction.",
            log_id_prefix,
            len(gateway_instructions_to_add),
        )

        return None
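
    # Illustrative sketch (not part of this module): with
    # inject_system_purpose and inject_response_format enabled in the
    # agent config, the callback above appends a block like the
    # following to the existing system instruction, separated by
    # "\n\n---\n\n" (values hypothetical):
    #
    #     ---
    #
    #     System Purpose:
    #     Answer customer billing questions.
    #
    #     Desired Response Format:
    #     Markdown with a short summary first.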
    async def _publish_text_as_partial_a2a_status_update(
        self,
        text_content: str,
        a2a_context: Dict,
        is_stream_terminating_content: bool = False,
    ):
        """
        Constructs and publishes a TaskStatusUpdateEvent for the given text.
        The 'final' flag is determined by is_stream_terminating_content.
        This method skips buffer flushing since it's used for LLM streaming text.
        """
        logical_task_id = a2a_context.get("logical_task_id", "unknown_task")
        log_identifier_helper = (
            f"{self.log_identifier}[PublishPartialText:{logical_task_id}]"
        )

        if not text_content:
            log.debug(
                "%s No text content to publish as update (final=%s).",
                log_identifier_helper,
                is_stream_terminating_content,
            )
            return

        try:
            a2a_message = a2a.create_agent_text_message(
                text=text_content,
                task_id=logical_task_id,
                context_id=a2a_context.get("contextId"),
            )
            event_metadata = {"agent_name": self.agent_name}
            status_update_event = a2a.create_status_update(
                task_id=logical_task_id,
                context_id=a2a_context.get("contextId"),
                message=a2a_message,
                is_final=is_stream_terminating_content,
                metadata=event_metadata,
            )

            await self._publish_status_update_with_buffer_flush(
                status_update_event,
                a2a_context,
                skip_buffer_flush=True,
            )

            log.debug(
                "%s Published LLM streaming text (length: %d bytes, final: %s).",
                log_identifier_helper,
                len(text_content.encode("utf-8")),
                is_stream_terminating_content,
            )

        except Exception as e:
            log.exception(
                "%s Error in _publish_text_as_partial_a2a_status_update: %s",
                log_identifier_helper,
                e,
            )

    async def _publish_agent_status_signal_update(
        self, status_text: str, a2a_context: Dict
    ):
        """
        Constructs and publishes a TaskStatusUpdateEvent specifically for agent_status_message signals.
        This method will flush the buffer before publishing to maintain proper message ordering.
        """
        logical_task_id = a2a_context.get("logical_task_id", "unknown_task")
        log_identifier_helper = (
            f"{self.log_identifier}[PublishAgentSignal:{logical_task_id}]"
        )

        if not status_text:
            log.debug(
                "%s No text content for agent status signal.", log_identifier_helper
            )
            return

        try:
            progress_data = AgentProgressUpdateData(status_text=status_text)
            status_update_event = a2a.create_data_signal_event(
                task_id=logical_task_id,
                context_id=a2a_context.get("contextId"),
                signal_data=progress_data,
                agent_name=self.agent_name,
                part_metadata={"source_embed_type": "status_update"},
            )

            await self._publish_status_update_with_buffer_flush(
                status_update_event,
                a2a_context,
                skip_buffer_flush=False,
            )

            log.debug(
                "%s Published agent_status_message signal ('%s').",
                log_identifier_helper,
                status_text,
            )

        except Exception as e:
            log.exception(
                "%s Error in _publish_agent_status_signal_update: %s",
                log_identifier_helper,
                e,
            )

    async def _flush_buffer_if_needed(
        self, a2a_context: Dict, reason: str = "status_update"
    ) -> bool:
        """
        Flushes streaming buffer if it contains content.

        Args:
            a2a_context: The A2A context dictionary for the current task
            reason: The reason for flushing (for logging purposes)

        Returns:
            bool: True if buffer was flushed, False if no content to flush
        """
        logical_task_id = a2a_context.get("logical_task_id", "unknown_task")
        log_identifier = f"{self.log_identifier}[BufferFlush:{logical_task_id}]"

        with self.active_tasks_lock:
            task_context = self.active_tasks.get(logical_task_id)

        if not task_context:
            log.warning(
                "%s TaskExecutionContext not found for task %s. Cannot flush buffer.",
                log_identifier,
                logical_task_id,
            )
            return False

        buffer_content = task_context.get_streaming_buffer_content()
        if not buffer_content:
            log.debug(
                "%s No buffer content to flush (reason: %s).",
                log_identifier,
                reason,
            )
            return False

        buffer_size = len(buffer_content.encode("utf-8"))
        log.info(
            "%s Flushing buffer content (size: %d bytes, reason: %s).",
            log_identifier,
            buffer_size,
            reason,
        )

        try:
            resolved_text, unprocessed_tail = await self._flush_and_resolve_buffer(
                a2a_context, is_final=False
            )

            if resolved_text:
                await self._publish_text_as_partial_a2a_status_update(
                    resolved_text,
                    a2a_context,
                    is_stream_terminating_content=False,
                )
                log.debug(
                    "%s Successfully flushed and published buffer content (resolved: %d bytes).",
                    log_identifier,
                    len(resolved_text.encode("utf-8")),
                )
                return True
            else:
                log.debug(
                    "%s Buffer flush completed but no resolved text to publish.",
                    log_identifier,
                )
                return False

        except Exception as e:
            log.exception(
                "%s Error during buffer flush (reason: %s): %s",
                log_identifier,
                reason,
                e,
            )
            return False
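
    # Illustrative sketch (not part of this module): any status update
    # that must not overtake already-streamed text flushes the buffer
    # first, which is what _publish_status_update_with_buffer_flush does
    # when skip_buffer_flush is False (`component` is hypothetical):
    #
    #     flushed = await component._flush_buffer_if_needed(
    #         a2a_context, reason="before_tool_start"
    #     )
    #     # Any buffered LLM text is now published, so the status event
    #     # sent next cannot arrive ahead of it.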
    async def _publish_status_update_with_buffer_flush(
        self,
        status_update_event: TaskStatusUpdateEvent,
        a2a_context: Dict,
        skip_buffer_flush: bool = False,
    ) -> None:
        """
        Central method for publishing status updates with automatic buffer flushing.

        Args:
            status_update_event: The status update event to publish
            a2a_context: The A2A context dictionary for the current task
            skip_buffer_flush: If True, skip buffer flushing (used for LLM streaming text)
        """
        logical_task_id = a2a_context.get("logical_task_id", "unknown_task")
        jsonrpc_request_id = a2a_context.get("jsonrpc_request_id")
        log_identifier = f"{self.log_identifier}[StatusUpdate:{logical_task_id}]"

        status_type = "unknown"
        if status_update_event.metadata:
            if status_update_event.metadata.get("type") == "tool_invocation_start":
                status_type = "tool_invocation_start"
            elif "agent_name" in status_update_event.metadata:
                status_type = "agent_status"

        if (
            status_update_event.status
            and status_update_event.status.message
            and status_update_event.status.message.parts
        ):
            for part in status_update_event.status.message.parts:
                if hasattr(part, "data") and part.data:
                    if part.data.get("a2a_signal_type") == "agent_status_message":
                        status_type = "agent_status_signal"
                        break
                    elif "tool_error" in part.data:
                        status_type = "tool_failure"
                        break

        log.debug(
            "%s Publishing status update (type: %s, skip_buffer_flush: %s).",
            log_identifier,
            status_type,
            skip_buffer_flush,
        )

        if not skip_buffer_flush:
            buffer_was_flushed = await self._flush_buffer_if_needed(
                a2a_context, reason=f"before_{status_type}_status"
            )
            if buffer_was_flushed:
                log.info(
                    "%s Buffer flushed before %s status update.",
                    log_identifier,
                    status_type,
                )

        try:
            rpc_response = a2a.create_success_response(
                result=status_update_event, request_id=jsonrpc_request_id
            )
            payload_to_publish = rpc_response.model_dump(exclude_none=True)

            target_topic = a2a_context.get(
                "statusTopic"
            ) or a2a.get_gateway_status_topic(
                self.namespace, self.get_gateway_id(), logical_task_id
            )

            # Construct user_properties to ensure ownership can be determined by gateways
            user_properties = {
                "a2aUserConfig": a2a_context.get("a2a_user_config"),
                "clientId": a2a_context.get("client_id"),
                "delegating_agent_name": self.get_config("agent_name"),
            }

            self._publish_a2a_event(
                payload_to_publish, target_topic, a2a_context, user_properties
            )

            log.debug(
                "%s Published %s status update to %s.",
                log_identifier,
                status_type,
                target_topic,
            )

        except Exception as e:
            log.exception(
                "%s Error publishing %s status update: %s",
                log_identifier,
                status_type,
                e,
            )
            raise

    async def _filter_text_from_final_streaming_event(
        self, adk_event: ADKEvent, a2a_context: Dict
    ) -> ADKEvent:
        """
        Filters out text parts from the final ADKEvent of a turn for PERSISTENT streaming sessions.
        This prevents sending redundant, aggregated text that was already streamed.
        Non-text parts like function calls are preserved.
        """
        is_run_based_session = a2a_context.get("is_run_based_session", False)
        is_streaming = a2a_context.get("is_streaming", False)
        is_final_turn_event = not adk_event.partial
        has_content_parts = adk_event.content and adk_event.content.parts

        # Only filter for PERSISTENT (not run-based) streaming sessions.
        if (
            not is_run_based_session
            and is_streaming
            and is_final_turn_event
            and has_content_parts
        ):
            log_id = f"{self.log_identifier}[FilterFinalStreamEvent:{a2a_context.get('logical_task_id', 'unknown')}]"
            log.debug(
                "%s Filtering final streaming event to remove redundant text.", log_id
            )

            non_text_parts = [
                part for part in adk_event.content.parts if part.text is None
            ]

            if len(non_text_parts) < len(adk_event.content.parts):
                event_copy = adk_event.model_copy(deep=True)
                event_copy.content = (
                    adk_types.Content(parts=non_text_parts) if non_text_parts else None
                )
                log.info(
                    "%s Removed text from final streaming event. Kept %d non-text part(s).",
                    log_id,
                    len(non_text_parts),
                )
                return event_copy

        return adk_event
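
    # Illustrative sketch (not part of this module): for a persistent
    # streaming session the final event's aggregated text was already
    # delivered chunk by chunk, so only non-text parts survive the
    # filter above; e.g. parts [text, function_call] become
    # [function_call], and an all-text event ends with content=None.
    #
    #     kept = [p for p in adk_event.content.parts if p.text is None]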
    async def process_and_publish_adk_event(
        self, adk_event: ADKEvent, a2a_context: Dict
    ):
        """
        Main orchestrator for processing ADK events.
        Handles text buffering, embed resolution, and event routing based on
        whether the event is partial or the final event of a turn.
        """
        logical_task_id = a2a_context.get("logical_task_id", "unknown_task")
        log_id_main = (
            f"{self.log_identifier}[ProcessADKEvent:{logical_task_id}:{adk_event.id}]"
        )
        log.debug(
            "%s Received ADKEvent (Partial: %s, Final Turn: %s).",
            log_id_main,
            adk_event.partial,
            not adk_event.partial,
        )

        if adk_event.content and adk_event.content.parts:
            if any(
                p.function_response
                and p.function_response.name == "_continue_generation"
                for p in adk_event.content.parts
            ):
                log.debug(
                    "%s Discarding _continue_generation tool response event.",
                    log_id_main,
                )
                return

        if adk_event.custom_metadata and adk_event.custom_metadata.get(
            "was_interrupted"
        ):
            log.debug(
                "%s Found 'was_interrupted' signal. Skipping event.",
                log_id_main,
            )
            return

        with self.active_tasks_lock:
            task_context = self.active_tasks.get(logical_task_id)

        if not task_context:
            log.error(
                "%s TaskExecutionContext not found for task %s. Cannot process ADK event.",
                log_id_main,
                logical_task_id,
            )
            return

        is_run_based_session = a2a_context.get("is_run_based_session", False)
        is_final_turn_event = not adk_event.partial

        try:
            from solace_agent_mesh_enterprise.auth.tool_auth import (
                handle_tool_auth_event,
            )

            auth_status_update = await handle_tool_auth_event(
                adk_event, self, a2a_context
            )
            if auth_status_update:
                await self._publish_status_update_with_buffer_flush(
                    auth_status_update,
                    a2a_context,
                    skip_buffer_flush=False,
                )
                return
        except ImportError:
            pass

        if not is_final_turn_event:
            if adk_event.content and adk_event.content.parts:
                for part in adk_event.content.parts:
                    if part.text is not None:
                        # Check if this is a new turn by comparing invocation_id
                        if adk_event.invocation_id:
                            task_context.check_and_update_invocation(
                                adk_event.invocation_id
                            )
                        is_first_text = task_context.is_first_text_in_turn()
                        should_add_spacing = task_context.should_add_turn_spacing()

                        # Add spacing if this is the first text of a new turn
                        # We add it BEFORE the text, regardless of current buffer content
                        if should_add_spacing and is_first_text:
                            # Add double newline to separate turns (new paragraph)
                            task_context.append_to_streaming_buffer("\n\n")
                            log.debug(
                                "%s Added turn spacing before new invocation %s",
                                log_id_main,
                                adk_event.invocation_id,
                            )

                        task_context.append_to_streaming_buffer(part.text)
                        log.debug(
                            "%s Appended text to buffer. New buffer size: %d bytes",
                            log_id_main,
                            len(
                                task_context.get_streaming_buffer_content().encode(
                                    "utf-8"
                                )
                            ),
                        )

            buffer_content = task_context.get_streaming_buffer_content()
            batching_disabled = self.stream_batching_threshold_bytes <= 0
            buffer_has_content = bool(buffer_content)
            threshold_met = (
                buffer_has_content
                and not batching_disabled
                and (
                    len(buffer_content.encode("utf-8"))
                    >= self.stream_batching_threshold_bytes
                )
            )

            if buffer_has_content and (batching_disabled or threshold_met):
                log.debug(
                    "%s Partial event triggered buffer flush due to size/batching config.",
                    log_id_main,
                )
                resolved_text, _ = await self._flush_and_resolve_buffer(
|
|
1792
|
+
a2a_context, is_final=False
|
|
1793
|
+
)
|
|
1794
|
+
|
|
1795
|
+
if resolved_text:
|
|
1796
|
+
if is_run_based_session:
|
|
1797
|
+
task_context.append_to_run_based_buffer(resolved_text)
|
|
1798
|
+
log.debug(
|
|
1799
|
+
"%s [RUN_BASED] Appended %d bytes to run_based_response_buffer.",
|
|
1800
|
+
log_id_main,
|
|
1801
|
+
len(resolved_text.encode("utf-8")),
|
|
1802
|
+
)
|
|
1803
|
+
else:
|
|
1804
|
+
await self._publish_text_as_partial_a2a_status_update(
|
|
1805
|
+
resolved_text, a2a_context
|
|
1806
|
+
)
|
|
1807
|
+
else:
|
|
1808
|
+
buffer_content = task_context.get_streaming_buffer_content()
|
|
1809
|
+
if buffer_content:
|
|
1810
|
+
log.debug(
|
|
1811
|
+
"%s Final event triggered flush of remaining buffer content.",
|
|
1812
|
+
log_id_main,
|
|
1813
|
+
)
|
|
1814
|
+
resolved_text, _ = await self._flush_and_resolve_buffer(
|
|
1815
|
+
a2a_context, is_final=True
|
|
1816
|
+
)
|
|
1817
|
+
if resolved_text:
|
|
1818
|
+
if is_run_based_session:
|
|
1819
|
+
task_context.append_to_run_based_buffer(resolved_text)
|
|
1820
|
+
log.debug(
|
|
1821
|
+
"%s [RUN_BASED] Appended final %d bytes to run_based_response_buffer.",
|
|
1822
|
+
log_id_main,
|
|
1823
|
+
len(resolved_text.encode("utf-8")),
|
|
1824
|
+
)
|
|
1825
|
+
else:
|
|
1826
|
+
await self._publish_text_as_partial_a2a_status_update(
|
|
1827
|
+
resolved_text, a2a_context
|
|
1828
|
+
)
|
|
1829
|
+
|
|
1830
|
+
# Prepare and publish the final event for observability
|
|
1831
|
+
event_to_publish = await self._filter_text_from_final_streaming_event(
|
|
1832
|
+
adk_event, a2a_context
|
|
1833
|
+
)
|
|
1834
|
+
|
|
1835
|
+
(
|
|
1836
|
+
a2a_payload,
|
|
1837
|
+
target_topic,
|
|
1838
|
+
user_properties,
|
|
1839
|
+
_,
|
|
1840
|
+
) = await format_and_route_adk_event(event_to_publish, a2a_context, self)
|
|
1841
|
+
|
|
1842
|
+
if a2a_payload and target_topic:
|
|
1843
|
+
self._publish_a2a_event(a2a_payload, target_topic, a2a_context)
|
|
1844
|
+
log.debug(
|
|
1845
|
+
"%s Published final turn event (e.g., tool call) to %s.",
|
|
1846
|
+
log_id_main,
|
|
1847
|
+
target_topic,
|
|
1848
|
+
)
|
|
1849
|
+
else:
|
|
1850
|
+
log.debug(
|
|
1851
|
+
"%s Final turn event did not result in a publishable A2A message.",
|
|
1852
|
+
log_id_main,
|
|
1853
|
+
)
|
|
1854
|
+
|
|
1855
|
+
await self._handle_artifact_return_signals(adk_event, a2a_context)
|
|
1856
|
+
|
|
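    # Batching semantics implied by the partial-event branch above (sketch):
    #   stream_batching_threshold_bytes <= 0 -> flush the buffer on every
    #       partial event that leaves it non-empty (no batching);
    #   stream_batching_threshold_bytes  > 0 -> flush only once the UTF-8
    #       encoded buffer reaches the threshold.
    # So a hypothetical threshold of 1024 coalesces small LLM text deltas into
    # roughly 1 KiB status updates instead of one update per delta.
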
    async def _flush_and_resolve_buffer(
        self, a2a_context: Dict, is_final: bool
    ) -> Tuple[str, str]:
        """Flushes buffer, resolves embeds, handles signals, returns (resolved_text, unprocessed_tail)."""
        logical_task_id = a2a_context.get("logical_task_id", "unknown_task")
        log_id = f"{self.log_identifier}[FlushBuffer:{logical_task_id}]"

        with self.active_tasks_lock:
            task_context = self.active_tasks.get(logical_task_id)

        if not task_context:
            log.error(
                "%s TaskExecutionContext not found for task %s. Cannot flush/resolve buffer.",
                log_id,
                logical_task_id,
            )
            return "", ""

        text_to_process = task_context.flush_streaming_buffer()

        resolved_text, signals_found, unprocessed_tail = (
            await self._resolve_early_embeds_and_handle_signals(
                text_to_process, a2a_context
            )
        )

        if not is_final:
            if unprocessed_tail:
                task_context.append_to_streaming_buffer(unprocessed_tail)
                log.debug(
                    "%s Placed unprocessed tail (length %d) back into buffer.",
                    log_id,
                    len(unprocessed_tail.encode("utf-8")),
                )
        else:
            if unprocessed_tail is not None and unprocessed_tail != "":
                resolved_text = resolved_text + unprocessed_tail

        if signals_found:
            log.info(
                "%s Handling %d signals from buffer resolution.",
                log_id,
                len(signals_found),
            )
            for _signal_index, signal_data_tuple, _placeholder in signals_found:
                if (
                    isinstance(signal_data_tuple, tuple)
                    and len(signal_data_tuple) == 3
                    and signal_data_tuple[0] is None
                    and signal_data_tuple[1] == "SIGNAL_STATUS_UPDATE"
                ):
                    status_text = signal_data_tuple[2]
                    log.info(
                        "%s Publishing SIGNAL_STATUS_UPDATE from buffer: '%s'",
                        log_id,
                        status_text,
                    )
                    await self._publish_agent_status_signal_update(
                        status_text, a2a_context
                    )
                    resolved_text = resolved_text.replace(_placeholder, "")

        return resolved_text, unprocessed_tail

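    # Shape of the signal entries consumed above (inferred from the matching
    # logic, not a documented contract): each element of `signals_found` is
    # (signal_index, signal_data_tuple, placeholder), and a status-update
    # signal carries signal_data_tuple == (None, "SIGNAL_STATUS_UPDATE", text).
    # The placeholder is stripped from the resolved text after publishing.
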
    async def _handle_artifact_return_signals(
        self, adk_event: ADKEvent, a2a_context: Dict
    ):
        """
        Processes artifact return signals.
        This method is triggered by a placeholder in state_delta, but reads the
        actual list of signals from the TaskExecutionContext.
        """
        logical_task_id = a2a_context.get("logical_task_id", "unknown_task")
        log_id = f"{self.log_identifier}[ArtifactSignals:{logical_task_id}]"

        # Check for the trigger in state_delta. The presence of any key is enough.
        has_signal_trigger = (
            adk_event.actions
            and adk_event.actions.state_delta
            and any(
                k.startswith("temp:a2a_return_artifact:")
                for k in adk_event.actions.state_delta
            )
        )

        if not has_signal_trigger:
            return

        with self.active_tasks_lock:
            task_context = self.active_tasks.get(logical_task_id)

        if not task_context:
            log.warning(
                "%s No TaskExecutionContext found for task %s. Cannot process artifact signals.",
                log_id,
                logical_task_id,
            )
            return

        all_signals = task_context.get_and_clear_artifact_signals()

        if not all_signals:
            log.info(
                "%s Triggered for artifact signals, but none were found in the execution context.",
                log_id,
            )
            return

        log.info(
            "%s Found %d artifact return signal(s) in the execution context.",
            log_id,
            len(all_signals),
        )

        original_session_id = a2a_context.get("session_id")
        user_id = a2a_context.get("user_id")
        adk_app_name = self.get_config("agent_name")

        peer_status_topic = a2a_context.get("statusTopic")
        namespace = self.get_config("namespace")
        gateway_id = self.get_gateway_id()

        artifact_topic = peer_status_topic or a2a.get_gateway_status_topic(
            namespace, gateway_id, logical_task_id
        )

        if not self.artifact_service:
            log.error("%s Artifact service not available.", log_id)
            return
        if not artifact_topic:
            log.error("%s Could not determine artifact topic.", log_id)
            return

        for item in all_signals:
            try:
                filename = item["filename"]
                version = item["version"]

                log.info(
                    "%s Processing artifact return signal for '%s' v%d from context.",
                    log_id,
                    filename,
                    version,
                )

                loaded_adk_part = await self.artifact_service.load_artifact(
                    app_name=adk_app_name,
                    user_id=user_id,
                    session_id=original_session_id,
                    filename=filename,
                    version=version,
                )

                if not loaded_adk_part:
                    log.warning(
                        "%s Failed to load artifact '%s' v%d.",
                        log_id,
                        filename,
                        version,
                    )
                    continue

                a2a_file_part = await a2a.translate_adk_part_to_a2a_filepart(
                    adk_part=loaded_adk_part,
                    filename=filename,
                    a2a_context=a2a_context,
                    artifact_service=self.artifact_service,
                    artifact_handling_mode=self.artifact_handling_mode,
                    adk_app_name=self.get_config("agent_name"),
                    log_identifier=self.log_identifier,
                    version=version,
                )

                if a2a_file_part:
                    a2a_message = a2a.create_agent_parts_message(
                        parts=[a2a_file_part],
                        task_id=logical_task_id,
                        context_id=original_session_id,
                    )
                    task_status = a2a.create_task_status(
                        state=TaskState.working, message=a2a_message
                    )
                    status_update_event = TaskStatusUpdateEvent(
                        task_id=logical_task_id,
                        context_id=original_session_id,
                        status=task_status,
                        final=False,
                        kind="status-update",
                    )
                    artifact_payload = a2a.create_success_response(
                        result=status_update_event,
                        request_id=a2a_context.get("jsonrpc_request_id"),
                    ).model_dump(exclude_none=True)

                    self._publish_a2a_event(
                        artifact_payload, artifact_topic, a2a_context
                    )

                    log.info(
                        "%s Published TaskStatusUpdateEvent with FilePart for '%s' to %s",
                        log_id,
                        filename,
                        artifact_topic,
                    )
                else:
                    log.warning(
                        "%s Failed to translate artifact '%s' v%d to A2A FilePart.",
                        log_id,
                        filename,
                        version,
                    )

            except Exception as e:
                log.exception(
                    "%s Error processing artifact signal item %s from context: %s",
                    log_id,
                    item,
                    e,
                )

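    # Trigger convention used above (a sketch inferred from this method): a tool
    # requests artifact return by writing any state_delta key prefixed
    # "temp:a2a_return_artifact:" (the suffix is ignored here) and appending
    # {"filename": ..., "version": ...} dicts to the TaskExecutionContext's
    # artifact-signal list, which this method drains via
    # get_and_clear_artifact_signals().
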
    def _format_final_task_status(
        self, last_event: Optional[ADKEvent], override_text: Optional[str] = None
    ) -> TaskStatus:
        """Helper to format the final TaskStatus based on the last ADK event."""
        log.debug(
            "%s Formatting final task status from last ADK event %s",
            self.log_identifier,
            last_event.id if last_event else "None",
        )
        a2a_state = TaskState.completed
        a2a_parts = []

        if override_text is not None:
            a2a_parts.append(a2a.create_text_part(text=override_text))
            # Add non-text parts from the last event
            if last_event and last_event.content and last_event.content.parts:
                for part in last_event.content.parts:
                    if part.text is None:
                        if part.function_response:
                            a2a_parts.extend(
                                a2a.translate_adk_function_response_to_a2a_parts(part)
                            )
        else:
            # Original logic
            if last_event and last_event.content and last_event.content.parts:
                for part in last_event.content.parts:
                    if part.text:
                        a2a_parts.append(a2a.create_text_part(text=part.text))
                    elif part.function_response:
                        a2a_parts.extend(
                            a2a.translate_adk_function_response_to_a2a_parts(part)
                        )

        if last_event and last_event.actions:
            if last_event.actions.requested_auth_configs:
                a2a_state = TaskState.input_required
                a2a_parts.append(
                    a2a.create_text_part(text="[Agent requires input/authentication]")
                )

        if not a2a_parts:
            a2a_message = a2a.create_agent_text_message(text="")
        else:
            a2a_message = a2a.create_agent_parts_message(parts=a2a_parts)
        return a2a.create_task_status(state=a2a_state, message=a2a_message)

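    # Behavior summary (sketch): with override_text set (the RUN_BASED path),
    # the aggregated text becomes the sole text part and only non-text parts
    # (e.g. function responses) are copied from the last event; without it,
    # both text and function-response parts of the last event are translated
    # as-is.
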
    async def finalize_task_success(self, a2a_context: Dict):
        """
        Finalizes a task successfully. Fetches final state, publishes final A2A response,
        and ACKs the original message.
        For RUN_BASED tasks, it uses the aggregated response buffer.
        For STREAMING tasks, it uses the content of the last ADK event.
        """
        logical_task_id = a2a_context.get("logical_task_id")

        # Retrieve the original Solace message from TaskExecutionContext
        original_message: Optional[SolaceMessage] = None
        with self.active_tasks_lock:
            task_context = self.active_tasks.get(logical_task_id)
            if task_context:
                original_message = task_context.get_original_solace_message()

        log.info(
            "%s Finalizing task %s successfully.", self.log_identifier, logical_task_id
        )
        try:
            session_id_to_retrieve = a2a_context.get(
                "effective_session_id", a2a_context.get("session_id")
            )
            original_session_id = a2a_context.get("session_id")
            user_id = a2a_context.get("user_id")
            client_id = a2a_context.get("client_id")
            jsonrpc_request_id = a2a_context.get("jsonrpc_request_id")
            peer_reply_topic = a2a_context.get("replyToTopic")
            namespace = self.get_config("namespace")
            agent_name = self.get_config("agent_name")
            is_run_based_session = a2a_context.get("is_run_based_session", False)

            final_status: TaskStatus

            with self.active_tasks_lock:
                task_context = self.active_tasks.get(logical_task_id)

            final_adk_session = await self.session_service.get_session(
                app_name=agent_name,
                user_id=user_id,
                session_id=session_id_to_retrieve,
            )
            if not final_adk_session:
                raise RuntimeError(
                    f"Could not retrieve final session state for {session_id_to_retrieve}"
                )

            last_event = (
                final_adk_session.events[-1] if final_adk_session.events else None
            )

            if is_run_based_session:
                aggregated_text = ""
                if task_context:
                    aggregated_text = task_context.run_based_response_buffer
                log.info(
                    "%s Using aggregated response buffer for RUN_BASED task %s (length: %d bytes).",
                    self.log_identifier,
                    logical_task_id,
                    len(aggregated_text.encode("utf-8")),
                )
                final_status = self._format_final_task_status(
                    last_event, override_text=aggregated_text
                )
            else:
                if last_event:
                    final_status = self._format_final_task_status(last_event)
                else:
                    final_status = a2a.create_task_status(
                        state=TaskState.completed,
                        message=a2a.create_agent_text_message(text="Task completed."),
                    )

            final_a2a_artifacts: List[A2AArtifact] = []
            log.debug(
                "%s Final artifact bundling is removed. Artifacts sent via TaskArtifactUpdateEvent.",
                self.log_identifier,
            )

            final_task_metadata = {"agent_name": agent_name}
            if task_context and task_context.produced_artifacts:
                final_task_metadata["produced_artifacts"] = (
                    task_context.produced_artifacts
                )
                log.info(
                    "%s Attaching manifest of %d produced artifacts to final task metadata.",
                    self.log_identifier,
                    len(task_context.produced_artifacts),
                )

            # Add token usage summary
            if task_context:
                token_summary = task_context.get_token_usage_summary()
                if token_summary["total_tokens"] > 0:
                    final_task_metadata["token_usage"] = token_summary
                    log.info(
                        "%s Task %s used %d total tokens (input: %d, output: %d, cached: %d)",
                        self.log_identifier,
                        logical_task_id,
                        token_summary["total_tokens"],
                        token_summary["total_input_tokens"],
                        token_summary["total_output_tokens"],
                        token_summary["total_cached_input_tokens"],
                    )

            final_task = a2a.create_final_task(
                task_id=logical_task_id,
                context_id=original_session_id,
                final_status=final_status,
                artifacts=(final_a2a_artifacts if final_a2a_artifacts else None),
                metadata=final_task_metadata,
            )
            final_response = a2a.create_success_response(
                result=final_task, request_id=jsonrpc_request_id
            )
            a2a_payload = final_response.model_dump(exclude_none=True)
            target_topic = peer_reply_topic or a2a.get_client_response_topic(
                namespace, client_id
            )

            self._publish_a2a_event(a2a_payload, target_topic, a2a_context)
            log.info(
                "%s Published final successful response for task %s to %s (Artifacts NOT bundled).",
                self.log_identifier,
                logical_task_id,
                target_topic,
            )
            if original_message:
                try:
                    original_message.call_acknowledgements()
                    log.info(
                        "%s Called ACK for original message of task %s.",
                        self.log_identifier,
                        logical_task_id,
                    )
                except Exception as ack_e:
                    log.error(
                        "%s Failed to call ACK for task %s: %s",
                        self.log_identifier,
                        logical_task_id,
                        ack_e,
                    )
            else:
                log.warning(
                    "%s Original Solace message not found in context for task %s. Cannot ACK.",
                    self.log_identifier,
                    logical_task_id,
                )

        except Exception as e:
            log.exception(
                "%s Error during successful finalization of task %s: %s",
                self.log_identifier,
                logical_task_id,
                e,
            )
            if original_message:
                try:
                    original_message.call_negative_acknowledgements()
                    log.warning(
                        "%s Called NACK for original message of task %s due to finalization error.",
                        self.log_identifier,
                        logical_task_id,
                    )
                except Exception as nack_e:
                    log.error(
                        "%s Failed to call NACK for task %s after finalization error: %s",
                        self.log_identifier,
                        logical_task_id,
                        nack_e,
                    )
            else:
                log.warning(
                    "%s Original Solace message not found in context for task %s during finalization error. Cannot NACK.",
                    self.log_identifier,
                    logical_task_id,
                )

            try:
                jsonrpc_request_id = a2a_context.get("jsonrpc_request_id")
                client_id = a2a_context.get("client_id")
                peer_reply_topic = a2a_context.get("replyToTopic")
                namespace = self.get_config("namespace")
                error_response = a2a.create_internal_error_response(
                    message=f"Failed to finalize successful task: {e}",
                    request_id=jsonrpc_request_id,
                    data={"taskId": logical_task_id},
                )
                target_topic = peer_reply_topic or a2a.get_client_response_topic(
                    namespace, client_id
                )
                self.publish_a2a_message(
                    error_response.model_dump(exclude_none=True), target_topic
                )
            except Exception as report_err:
                log.error(
                    "%s Failed to report finalization error for task %s: %s",
                    self.log_identifier,
                    logical_task_id,
                    report_err,
                )

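    # Metadata attached to the final Task above (keys taken from this method;
    # the values shown are hypothetical):
    #   {
    #       "agent_name": "my-agent",
    #       "produced_artifacts": [...],           # only if any were produced
    #       "token_usage": {
    #           "total_tokens": 1234,
    #           "total_input_tokens": 1000,
    #           "total_output_tokens": 200,
    #           "total_cached_input_tokens": 34,
    #       },                                     # only if total_tokens > 0
    #   }
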
    def finalize_task_canceled(self, a2a_context: Dict):
        """
        Finalizes a task as CANCELED. Publishes A2A Task response with CANCELED state
        and ACKs the original message if available.
        Called by the background ADK thread wrapper when a task is cancelled.
        """
        logical_task_id = a2a_context.get("logical_task_id")

        # Retrieve the original Solace message from TaskExecutionContext
        original_message: Optional[SolaceMessage] = None
        with self.active_tasks_lock:
            task_context = self.active_tasks.get(logical_task_id)
            if task_context:
                original_message = task_context.get_original_solace_message()

        log.info(
            "%s Finalizing task %s as CANCELED.", self.log_identifier, logical_task_id
        )
        try:
            jsonrpc_request_id = a2a_context.get("jsonrpc_request_id")
            client_id = a2a_context.get("client_id")
            peer_reply_topic = a2a_context.get("replyToTopic")
            namespace = self.get_config("namespace")

            canceled_status = a2a.create_task_status(
                state=TaskState.canceled,
                message=a2a.create_agent_text_message(
                    text="Task cancelled by request."
                ),
            )
            agent_name = self.get_config("agent_name")
            final_task = a2a.create_final_task(
                task_id=logical_task_id,
                context_id=a2a_context.get("contextId"),
                final_status=canceled_status,
                metadata={"agent_name": agent_name},
            )
            final_response = a2a.create_success_response(
                result=final_task, request_id=jsonrpc_request_id
            )
            a2a_payload = final_response.model_dump(exclude_none=True)
            target_topic = peer_reply_topic or a2a.get_client_response_topic(
                namespace, client_id
            )

            self._publish_a2a_event(a2a_payload, target_topic, a2a_context)
            log.info(
                "%s Published final CANCELED response for task %s to %s.",
                self.log_identifier,
                logical_task_id,
                target_topic,
            )

            if original_message:
                try:
                    original_message.call_acknowledgements()
                    log.info(
                        "%s Called ACK for original message of cancelled task %s.",
                        self.log_identifier,
                        logical_task_id,
                    )
                except Exception as ack_e:
                    log.error(
                        "%s Failed to call ACK for cancelled task %s: %s",
                        self.log_identifier,
                        logical_task_id,
                        ack_e,
                    )
            else:
                log.warning(
                    "%s Original Solace message not found in context for cancelled task %s. Cannot ACK.",
                    self.log_identifier,
                    logical_task_id,
                )

        except Exception as e:
            log.exception(
                "%s Error during CANCELED finalization of task %s: %s",
                self.log_identifier,
                logical_task_id,
                e,
            )
            if original_message:
                try:
                    original_message.call_negative_acknowledgements()
                except Exception:
                    pass

    async def _publish_tool_failure_status(
        self, exception: Exception, a2a_context: Dict
    ):
        """
        Publishes an intermediate status update indicating a tool execution has failed.
        This method will flush the buffer before publishing to maintain proper message ordering.
        """
        logical_task_id = a2a_context.get("logical_task_id")
        log_identifier_helper = (
            f"{self.log_identifier}[ToolFailureStatus:{logical_task_id}]"
        )
        try:
            # Create the status update event
            tool_error_data_part = a2a.create_data_part(
                data={
                    "a2a_signal_type": "tool_execution_error",
                    "error_message": str(exception),
                    "details": "An unhandled exception occurred during tool execution.",
                }
            )

            status_message = a2a.create_agent_parts_message(
                parts=[tool_error_data_part],
                task_id=logical_task_id,
                context_id=a2a_context.get("contextId"),
            )
            status_update_event = a2a.create_status_update(
                task_id=logical_task_id,
                context_id=a2a_context.get("contextId"),
                message=status_message,
                is_final=False,
                metadata={"agent_name": self.get_config("agent_name")},
            )

            await self._publish_status_update_with_buffer_flush(
                status_update_event,
                a2a_context,
                skip_buffer_flush=False,
            )

            log.debug(
                "%s Published tool failure status update.",
                log_identifier_helper,
            )

        except Exception as e:
            log.error(
                "%s Failed to publish intermediate tool failure status: %s",
                log_identifier_helper,
                e,
            )

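    # DataPart payload emitted above (fields taken from this method; the error
    # text shown is hypothetical):
    #   {
    #       "a2a_signal_type": "tool_execution_error",
    #       "error_message": "ValueError: bad input",
    #       "details": "An unhandled exception occurred during tool execution.",
    #   }
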
    async def _repair_session_history_on_error(
        self, exception: Exception, a2a_context: Dict
    ):
        """
        Reactively repairs the session history if the last event was a tool call.
        This is "the belt" in the belt-and-suspenders strategy.
        """
        log_identifier = f"{self.log_identifier}[HistoryRepair]"
        try:
            from ...agent.adk.callbacks import create_dangling_tool_call_repair_content

            session_id = a2a_context.get("effective_session_id")
            user_id = a2a_context.get("user_id")
            agent_name = self.get_config("agent_name")

            if not all([session_id, user_id, agent_name, self.session_service]):
                log.warning(
                    "%s Skipping history repair due to missing context.", log_identifier
                )
                return

            session = await self.session_service.get_session(
                app_name=agent_name, user_id=user_id, session_id=session_id
            )

            if not session or not session.events:
                log.debug(
                    "%s No session or events found for history repair.", log_identifier
                )
                return

            last_event = session.events[-1]
            function_calls = last_event.get_function_calls()

            if not function_calls:
                log.debug(
                    "%s Last event was not a function call. No repair needed.",
                    log_identifier,
                )
                return

            log.info(
                "%s Last event contained function_call(s). Repairing session history.",
                log_identifier,
            )

            repair_content = create_dangling_tool_call_repair_content(
                dangling_calls=function_calls,
                error_message=f"Tool execution failed with an unhandled exception: {str(exception)}",
            )

            repair_event = ADKEvent(
                invocation_id=last_event.invocation_id,
                author=agent_name,
                content=repair_content,
            )

            await self.session_service.append_event(session=session, event=repair_event)
            log.info(
                "%s Session history repaired successfully with an error function_response.",
                log_identifier,
            )

        except Exception as e:
            log.exception(
                "%s Critical error during session history repair: %s", log_identifier, e
            )

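    # Repair strategy sketch: if the last persisted event holds dangling
    # function_call parts, a synthetic event (same invocation_id, authored by
    # the agent) carrying error function_responses is appended, so the next
    # LLM turn sees a well-formed call/response pair instead of an orphaned
    # tool call. The response content itself is built by
    # create_dangling_tool_call_repair_content().
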
    def finalize_task_limit_reached(
        self, a2a_context: Dict, exception: LlmCallsLimitExceededError
    ):
        """
        Finalizes a task when the LLM call limit is reached, prompting the user to continue.
        Sends a COMPLETED status with an informative message.
        """
        logical_task_id = a2a_context.get("logical_task_id")

        # Retrieve the original Solace message from TaskExecutionContext
        original_message: Optional[SolaceMessage] = None
        with self.active_tasks_lock:
            task_context = self.active_tasks.get(logical_task_id)
            if task_context:
                original_message = task_context.get_original_solace_message()

        log.info(
            "%s Finalizing task %s as COMPLETED (LLM call limit reached).",
            self.log_identifier,
            logical_task_id,
        )
        try:
            jsonrpc_request_id = a2a_context.get("jsonrpc_request_id")
            client_id = a2a_context.get("client_id")
            peer_reply_topic = a2a_context.get("replyToTopic")
            namespace = self.get_config("namespace")
            agent_name = self.get_config("agent_name")
            original_session_id = a2a_context.get("session_id")

            limit_message_text = (
                "This interaction has reached its processing limit. "
                "If you'd like to continue this conversation, please type 'continue'. "
                "Otherwise, you can start a new topic."
            )

            final_response = a2a.create_internal_error_response(
                message=limit_message_text,
                request_id=jsonrpc_request_id,
                data={"taskId": logical_task_id, "reason": "llm_call_limit_reached"},
            )
            a2a_payload = final_response.model_dump(exclude_none=True)

            target_topic = peer_reply_topic or a2a.get_client_response_topic(
                namespace, client_id
            )

            self._publish_a2a_event(a2a_payload, target_topic, a2a_context)
            log.info(
                "%s Published ERROR response for task %s to %s (LLM limit reached, user guided to continue).",
                self.log_identifier,
                logical_task_id,
                target_topic,
            )

            if original_message:
                try:
                    original_message.call_acknowledgements()
                    log.info(
                        "%s Called ACK for original message of task %s (LLM limit reached).",
                        self.log_identifier,
                        logical_task_id,
                    )
                except Exception as ack_e:
                    log.error(
                        "%s Failed to call ACK for task %s (LLM limit reached): %s",
                        self.log_identifier,
                        logical_task_id,
                        ack_e,
                    )
            else:
                log.warning(
                    "%s Original Solace message not found in context for task %s (LLM limit reached). Cannot ACK.",
                    self.log_identifier,
                    logical_task_id,
                )

        except Exception as e:
            log.exception(
                "%s Error during COMPLETED (LLM limit) finalization of task %s: %s",
                self.log_identifier,
                logical_task_id,
                e,
            )
            self.finalize_task_error(e, a2a_context)

    async def finalize_task_error(self, exception: Exception, a2a_context: Dict):
        """
        Finalizes a task with an error. Publishes a final A2A Task with a FAILED
        status and NACKs the original message.
        Called by the background ADK thread wrapper.
        """
        logical_task_id = a2a_context.get("logical_task_id")

        # Retrieve the original Solace message from TaskExecutionContext
        original_message: Optional[SolaceMessage] = None
        with self.active_tasks_lock:
            task_context = self.active_tasks.get(logical_task_id)
            if task_context:
                original_message = task_context.get_original_solace_message()

        log.error(
            "%s Finalizing task %s with error: %s",
            self.log_identifier,
            logical_task_id,
            exception,
        )
        try:
            await self._repair_session_history_on_error(exception, a2a_context)

            await self._publish_tool_failure_status(exception, a2a_context)

            client_id = a2a_context.get("client_id")
            jsonrpc_request_id = a2a_context.get("jsonrpc_request_id")
            peer_reply_topic = a2a_context.get("replyToTopic")
            namespace = self.get_config("namespace")

            failed_status = a2a.create_task_status(
                state=TaskState.failed,
                message=a2a.create_agent_text_message(
                    text="An unexpected error occurred during tool execution. Please try your request again. If the problem persists, contact an administrator."
                ),
            )

            final_task = a2a.create_final_task(
                task_id=logical_task_id,
                context_id=a2a_context.get("contextId"),
                final_status=failed_status,
                metadata={"agent_name": self.get_config("agent_name")},
            )

            final_response = a2a.create_success_response(
                result=final_task, request_id=jsonrpc_request_id
            )
            a2a_payload = final_response.model_dump(exclude_none=True)
            target_topic = peer_reply_topic or a2a.get_client_response_topic(
                namespace, client_id
            )

            self._publish_a2a_event(a2a_payload, target_topic, a2a_context)
            log.info(
                "%s Published final FAILED Task response for task %s to %s",
                self.log_identifier,
                logical_task_id,
                target_topic,
            )

            if original_message:
                try:
                    original_message.call_negative_acknowledgements()
                    log.info(
                        "%s Called NACK for original message of failed task %s.",
                        self.log_identifier,
                        logical_task_id,
                    )
                except Exception as nack_e:
                    log.error(
                        "%s Failed to call NACK for failed task %s: %s",
                        self.log_identifier,
                        logical_task_id,
                        nack_e,
                    )
            else:
                log.warning(
                    "%s Original Solace message not found in context for failed task %s. Cannot NACK.",
                    self.log_identifier,
                    logical_task_id,
                )

        except Exception as e:
            log.exception(
                "%s Error during error finalization of task %s: %s",
                self.log_identifier,
                logical_task_id,
                e,
            )
            if original_message:
                try:
                    original_message.call_negative_acknowledgements()
                    log.warning(
                        "%s Called NACK for task %s during error finalization fallback.",
                        self.log_identifier,
                        logical_task_id,
                    )
                except Exception as nack_e:
                    log.error(
                        "%s Failed to call NACK for task %s during error finalization fallback: %s",
                        self.log_identifier,
                        logical_task_id,
                        nack_e,
                    )
            else:
                log.warning(
                    "%s Original Solace message not found for task %s during error finalization fallback. Cannot NACK.",
                    self.log_identifier,
                    logical_task_id,
                )

    async def finalize_task_with_cleanup(
        self, a2a_context: Dict, is_paused: bool, exception: Optional[Exception] = None
    ):
        """
        Centralized async method to finalize a task and perform all necessary cleanup.
        This is scheduled on the component's event loop to ensure it runs after
        any pending status updates.

        Args:
            a2a_context: The context dictionary for the task.
            is_paused: Boolean indicating if the task is paused for a long-running tool.
            exception: The exception that occurred, if any.
        """
        logical_task_id = a2a_context.get("logical_task_id", "unknown_task")
        log_id = f"{self.log_identifier}[FinalizeTask:{logical_task_id}]"
        log.info(
            "%s Starting finalization and cleanup. Paused: %s, Exception: %s",
            log_id,
            is_paused,
            type(exception).__name__ if exception else "None",
        )

        try:
            if is_paused:
                log.info(
                    "%s Task is paused for a long-running tool. Skipping finalization logic.",
                    log_id,
                )
            else:
                try:
                    if exception:
                        if isinstance(exception, TaskCancelledError):
                            self.finalize_task_canceled(a2a_context)
                        elif isinstance(exception, LlmCallsLimitExceededError):
                            self.finalize_task_limit_reached(a2a_context, exception)
                        else:
                            await self.finalize_task_error(exception, a2a_context)
                    else:
                        await self.finalize_task_success(a2a_context)
                except Exception as e:
                    log.exception(
                        "%s An unexpected error occurred during the finalization logic itself: %s",
                        log_id,
                        e,
                    )
                    # Retrieve the original Solace message from TaskExecutionContext for fallback NACK
                    original_message: Optional[SolaceMessage] = None
                    with self.active_tasks_lock:
                        task_context = self.active_tasks.get(logical_task_id)
                        if task_context:
                            original_message = (
                                task_context.get_original_solace_message()
                            )

                    if original_message:
                        try:
                            original_message.call_negative_acknowledgements()
                        except Exception as nack_err:
                            log.error(
                                "%s Fallback NACK failed during finalization error: %s",
                                log_id,
                                nack_err,
                            )
        finally:
            if not is_paused:
                # Cleanup for RUN_BASED sessions remains, as it's a service-level concern
                if a2a_context.get("is_run_based_session"):
                    temp_session_id_to_delete = a2a_context.get(
                        "temporary_run_session_id_for_cleanup"
                    )
                    agent_name_for_session = a2a_context.get("agent_name_for_session")
                    user_id_for_session = a2a_context.get("user_id_for_session")

                    if (
                        temp_session_id_to_delete
                        and agent_name_for_session
                        and user_id_for_session
                    ):
                        log.info(
                            "%s Cleaning up RUN_BASED session (app: %s, user: %s, id: %s) from shared service for task_id='%s'",
                            log_id,
                            agent_name_for_session,
                            user_id_for_session,
                            temp_session_id_to_delete,
                            logical_task_id,
                        )
                        try:
                            if self.session_service:
                                await self.session_service.delete_session(
                                    app_name=agent_name_for_session,
                                    user_id=user_id_for_session,
                                    session_id=temp_session_id_to_delete,
                                )
                            else:
                                log.error(
                                    "%s self.session_service is None, cannot delete RUN_BASED session %s.",
                                    log_id,
                                    temp_session_id_to_delete,
                                )
                        except AttributeError:
                            log.error(
                                "%s self.session_service does not support 'delete_session'. Cleanup for RUN_BASED session (app: %s, user: %s, id: %s) skipped.",
                                log_id,
                                agent_name_for_session,
                                user_id_for_session,
                                temp_session_id_to_delete,
                            )
                        except Exception as e_cleanup:
                            log.error(
                                "%s Error cleaning up RUN_BASED session (app: %s, user: %s, id: %s) from shared service: %s",
                                log_id,
                                agent_name_for_session,
                                user_id_for_session,
                                temp_session_id_to_delete,
                                e_cleanup,
                                exc_info=True,
                            )
                    else:
                        log.warning(
                            "%s Could not clean up RUN_BASED session for task %s due to missing context (id_to_delete: %s, agent_name: %s, user_id: %s).",
                            log_id,
                            logical_task_id,
                            temp_session_id_to_delete,
                            agent_name_for_session,
                            user_id_for_session,
                        )

                with self.active_tasks_lock:
                    removed_task_context = self.active_tasks.pop(logical_task_id, None)
                    if removed_task_context:
                        log.debug(
                            "%s Removed TaskExecutionContext for task %s.",
                            log_id,
                            logical_task_id,
                        )
                    else:
                        log.warning(
                            "%s TaskExecutionContext for task %s was already removed.",
                            log_id,
                            logical_task_id,
                        )
            else:
                log.info(
                    "%s Task %s is paused for a long-running tool. Skipping all cleanup.",
                    log_id,
                    logical_task_id,
                )

            log.info(
                "%s Finalization and cleanup complete for task %s.",
                log_id,
                logical_task_id,
            )

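    # Assumed scheduling pattern for the coroutine above (a sketch; the exact
    # call site lives elsewhere in this component):
    #
    #     future = asyncio.run_coroutine_threadsafe(
    #         self.finalize_task_with_cleanup(a2a_context, is_paused, exc),
    #         self._async_loop,
    #     )
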
    def _resolve_instruction_provider(
        self, config_value: Any
    ) -> Union[str, InstructionProvider]:
        """Resolves instruction config using helper."""
        return resolve_instruction_provider(self, config_value)

    def _get_a2a_base_topic(self) -> str:
        """Returns the base topic prefix using helper."""
        return a2a.get_a2a_base_topic(self.namespace)

    def _get_discovery_topic(self) -> str:
        """Returns the discovery topic using helper."""
        return a2a.get_discovery_topic(self.namespace)

    def _get_agent_request_topic(self, agent_id: str) -> str:
        """Returns the agent request topic using helper."""
        return a2a.get_agent_request_topic(self.namespace, agent_id)

    def _get_agent_response_topic(
        self, delegating_agent_name: str, sub_task_id: str
    ) -> str:
        """Returns the agent response topic using helper."""
        return a2a.get_agent_response_topic(
            self.namespace, delegating_agent_name, sub_task_id
        )

    def _get_peer_agent_status_topic(
        self, delegating_agent_name: str, sub_task_id: str
    ) -> str:
        """Returns the peer agent status topic using helper."""
        return a2a.get_peer_agent_status_topic(
            self.namespace, delegating_agent_name, sub_task_id
        )

    def _get_client_response_topic(self, client_id: str) -> str:
        """Returns the client response topic using helper."""
        return a2a.get_client_response_topic(self.namespace, client_id)

    def _publish_a2a_event(
        self,
        payload: Dict,
        topic: str,
        a2a_context: Dict,
        user_properties_override: Optional[Dict] = None,
    ):
        """
        Centralized helper to publish an A2A event, ensuring user properties
        are consistently attached from the a2a_context or an override.
        """
        if user_properties_override is not None:
            user_properties = user_properties_override
        else:
            user_properties = {}
            if a2a_context.get("a2a_user_config"):
                user_properties["a2aUserConfig"] = a2a_context["a2a_user_config"]

        self.publish_a2a_message(payload, topic, user_properties)

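    # Property-attachment rule above (sketch): callers that pass no override get
    # user_properties == {"a2aUserConfig": a2a_context["a2a_user_config"]} when
    # the context carries a user config, and {} otherwise; an explicit override
    # is forwarded untouched.
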
    def submit_a2a_task(
        self,
        target_agent_name: str,
        a2a_message: A2AMessage,
        user_id: str,
        user_config: Dict[str, Any],
        sub_task_id: str,
    ) -> str:
        """
        Submits a task to a peer agent in a non-blocking way.
        Returns the sub_task_id for correlation.
        """
        log_identifier_helper = (
            f"{self.log_identifier}[SubmitA2ATask:{target_agent_name}]"
        )
        main_task_id = a2a_message.metadata.get("parentTaskId", "unknown_parent")
        log.debug(
            "%s Submitting non-blocking task for main task %s",
            log_identifier_helper,
            main_task_id,
        )

        # Validate agent access is allowed
        validate_agent_access(
            user_config=user_config,
            target_agent_name=target_agent_name,
            validation_context={
                "delegating_agent": self.get_config("agent_name"),
                "source": "agent_delegation",
            },
            log_identifier=log_identifier_helper,
        )

        peer_request_topic = self._get_agent_request_topic(target_agent_name)

        # Create a compliant SendMessageRequest
        send_params = MessageSendParams(message=a2a_message)
        a2a_request = SendMessageRequest(id=sub_task_id, params=send_params)

        delegating_agent_name = self.get_config("agent_name")
        reply_to_topic = self._get_agent_response_topic(
            delegating_agent_name=delegating_agent_name,
            sub_task_id=sub_task_id,
        )
        status_topic = self._get_peer_agent_status_topic(
            delegating_agent_name=delegating_agent_name,
            sub_task_id=sub_task_id,
        )

        user_properties = {
            "replyTo": reply_to_topic,
            "a2aStatusTopic": status_topic,
            "userId": user_id,
            "delegating_agent_name": delegating_agent_name,
        }
        if isinstance(user_config, dict):
            user_properties["a2aUserConfig"] = user_config

        # Retrieve and propagate authentication token from parent task context
        parent_task_id = a2a_message.metadata.get("parentTaskId")
        if parent_task_id:
            with self.active_tasks_lock:
                parent_task_context = self.active_tasks.get(parent_task_id)

            if parent_task_context:
                auth_token = parent_task_context.get_security_data("auth_token")
                if auth_token:
                    user_properties["authToken"] = auth_token
                    log.debug(
                        "%s Propagating authentication token to peer agent %s for sub-task %s",
                        log_identifier_helper,
                        target_agent_name,
                        sub_task_id,
                    )
                else:
                    log.debug(
                        "%s No authentication token found in parent task context for sub-task %s",
                        log_identifier_helper,
                        sub_task_id,
                    )
            else:
                log.warning(
                    "%s Parent task context not found for task %s, cannot propagate authentication token",
                    log_identifier_helper,
                    parent_task_id,
                )

        self.publish_a2a_message(
            payload=a2a_request.model_dump(by_alias=True, exclude_none=True),
            topic=peer_request_topic,
            user_properties=user_properties,
        )
        log.info(
            "%s Published delegation request to %s (Sub-Task ID: %s, ReplyTo: %s, StatusTo: %s)",
            log_identifier_helper,
            peer_request_topic,
            sub_task_id,
            reply_to_topic,
            status_topic,
        )

        return sub_task_id

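    # Hypothetical caller sketch (names `component` and `msg` are assumptions):
    #
    #     sub_task_id = component.submit_a2a_task(
    #         target_agent_name="peer-agent",
    #         a2a_message=msg,                 # metadata must carry parentTaskId
    #         user_id="user-123",
    #         user_config={},
    #         sub_task_id=str(uuid.uuid4()),
    #     )
    #
    # The reply and status topics are derived from sub_task_id, so peer
    # responses can be correlated back to this delegation.
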
    def _handle_scheduled_task_completion(
        self, future: concurrent.futures.Future, event_type_for_log: EventType
    ):
        """Callback to handle completion of futures from run_coroutine_threadsafe."""
        try:
            if future.cancelled():
                log.warning(
                    "%s Coroutine for event type %s (scheduled via run_coroutine_threadsafe) was cancelled.",
                    self.log_identifier,
                    event_type_for_log,
                )
            elif future.done() and future.exception() is not None:
                exception = future.exception()
                log.error(
                    "%s Coroutine for event type %s (scheduled via run_coroutine_threadsafe) failed with exception: %s",
                    self.log_identifier,
                    event_type_for_log,
                    exception,
                    exc_info=exception,
                )
            else:
                pass
        except Exception as e:
            log.error(
                "%s Error during _handle_scheduled_task_completion (for run_coroutine_threadsafe future) for event type %s: %s",
                self.log_identifier,
                event_type_for_log,
                e,
                exc_info=e,
            )

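    # Assumed wiring for the callback above (a sketch): run_coroutine_threadsafe
    # returns a concurrent.futures.Future, and the callback can be attached with
    # functools.partial so the event type is available for logging:
    #
    #     future = asyncio.run_coroutine_threadsafe(coro, self._async_loop)
    #     future.add_done_callback(
    #         partial(self._handle_scheduled_task_completion,
    #                 event_type_for_log=event_type)
    #     )
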
    async def _perform_async_init(self):
        """Coroutine executed on the dedicated loop to perform async initialization."""
        try:
            log.info(
                "%s Loading tools asynchronously in dedicated thread...",
                self.log_identifier,
            )
            (
                loaded_tools,
                enabled_builtin_tools,
                self._tool_cleanup_hooks,
            ) = await load_adk_tools(self)
            log.info(
                "%s Initializing ADK Agent/Runner asynchronously in dedicated thread...",
                self.log_identifier,
            )
            self.adk_agent = initialize_adk_agent(
                self, loaded_tools, enabled_builtin_tools
            )
            self.runner = initialize_adk_runner(self)

            log.info("%s Populating agent card tool manifest...", self.log_identifier)
            tool_manifest = []
            for tool in loaded_tools:
                if isinstance(tool, MCPToolset):
                    try:
                        log.debug(
                            "%s Retrieving tools from MCPToolset for Agent %s...",
                            self.log_identifier,
                            self.agent_name,
                        )
                        mcp_tools = await tool.get_tools()
                    except Exception as e:
                        log.error(
                            "%s Error retrieving tools from MCPToolset for Agent Card %s: %s",
                            self.log_identifier,
                            self.agent_name,
                            e,
                        )
                        continue
                    for mcp_tool in mcp_tools:
                        tool_manifest.append(
                            {
                                "id": mcp_tool.name,
                                "name": mcp_tool.name,
                                "description": mcp_tool.description
                                or "No description available.",
                            }
                        )
                elif isinstance(tool, OpenAPIToolset):
                    try:
                        log.debug(
                            "%s Retrieving tools from OpenAPIToolset for Agent %s...",
                            self.log_identifier,
                            self.agent_name,
                        )
                        openapi_tools = await tool.get_tools()
                    except Exception as e:
                        log.error(
                            "%s Error retrieving tools from OpenAPIToolset for Agent Card %s: %s",
                            self.log_identifier,
                            self.agent_name,
                            e,
                        )
                        continue
                    for openapi_tool in openapi_tools:
                        tool_manifest.append(
                            {
                                "id": openapi_tool.name,
                                "name": openapi_tool.name,
                                "description": openapi_tool.description
                                or "No description available.",
                            }
                        )
                else:
                    tool_name = getattr(tool, "name", getattr(tool, "__name__", None))
                    if tool_name is not None:
                        tool_manifest.append(
                            {
                                "id": tool_name,
                                "name": tool_name,
                                "description": getattr(
                                    tool, "description", getattr(tool, "__doc__", None)
                                )
                                or "No description available.",
                            }
                        )

            self.agent_card_tool_manifest = tool_manifest
            log.info(
                "%s Agent card tool manifest populated with %d tools.",
                self.log_identifier,
                len(self.agent_card_tool_manifest),
            )

            log.info(
                "%s Async initialization steps complete in dedicated thread.",
                self.log_identifier,
            )
            if self._async_init_future and not self._async_init_future.done():
                log.info(
                    "%s _perform_async_init: Signaling success to main thread.",
                    self.log_identifier,
                )
                self._async_loop.call_soon_threadsafe(
                    self._async_init_future.set_result, True
                )
            else:
                log.warning(
                    "%s _perform_async_init: _async_init_future is None or already done before signaling success.",
                    self.log_identifier,
                )
        except Exception as e:
            log.exception(
                "%s _perform_async_init: Error during async initialization in dedicated thread: %s",
                self.log_identifier,
                e,
            )
            if self._async_init_future and not self._async_init_future.done():
                log.error(
                    "%s _perform_async_init: Signaling failure to main thread.",
                    self.log_identifier,
                )
                self._async_loop.call_soon_threadsafe(
                    self._async_init_future.set_exception, e
                )
            else:
                log.warning(
                    "%s _perform_async_init: _async_init_future is None or already done before signaling failure.",
                    self.log_identifier,
                )
            raise e

3209
|
+
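The method resolves `_async_init_future` via `call_soon_threadsafe` so a thread other than the loop's own can block on the outcome. A hedged sketch of that waiting side (the future's creation is outside this hunk; names are inferred from the usage above):

```python
import asyncio

def start_and_wait_for_async_init(component, timeout_sec: float = 60.0) -> bool:
    loop = component.get_async_loop()
    # Future bound to the dedicated loop; _perform_async_init resolves it.
    component._async_init_future = loop.create_future()

    async def _init_and_wait():
        await component._perform_async_init()
        return await component._async_init_future

    # Blocks the calling thread until init succeeds, or re-raises its exception.
    return asyncio.run_coroutine_threadsafe(_init_and_wait(), loop).result(
        timeout=timeout_sec
    )
```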
    def cleanup(self):
        """Clean up resources on component shutdown."""
        log.info("%s Cleaning up A2A ADK Host Component.", self.log_identifier)
        self.cancel_timer(self._card_publish_timer_id)
        self.cancel_timer(self.HEALTH_CHECK_TIMER_ID)

        cleanup_func_details = self.get_config("agent_cleanup_function")

        from .app import AgentInitCleanupConfig  # Avoid circular import

        if cleanup_func_details and isinstance(
            cleanup_func_details, AgentInitCleanupConfig
        ):
            module_name = cleanup_func_details.get("module")
            func_name = cleanup_func_details.get("name")
            base_path = cleanup_func_details.get("base_path")

            if module_name and func_name:
                log.info(
                    "%s Attempting to load and execute cleanup_function: %s.%s",
                    self.log_identifier,
                    module_name,
                    func_name,
                )
                try:
                    module = import_module(module_name, base_path=base_path)
                    cleanup_function = getattr(module, func_name)

                    if not callable(cleanup_function):
                        log.error(
                            "%s Cleanup function '%s' in module '%s' is not callable. Skipping.",
                            self.log_identifier,
                            func_name,
                            module_name,
                        )
                    else:
                        cleanup_function(self)
                        log.info(
                            "%s Successfully executed cleanup_function: %s.%s",
                            self.log_identifier,
                            module_name,
                            func_name,
                        )
                except Exception as e:
                    log.exception(
                        "%s Error during agent cleanup via cleanup_function '%s.%s': %s",
                        self.log_identifier,
                        module_name,
                        func_name,
                        e,
                    )
        if self._tool_cleanup_hooks:
            log.info(
                "%s Executing %d tool cleanup hooks...",
                self.log_identifier,
                len(self._tool_cleanup_hooks),
            )
            if self._async_loop and self._async_loop.is_running():

                async def run_tool_cleanup():
                    results = await asyncio.gather(
                        *[hook() for hook in self._tool_cleanup_hooks],
                        return_exceptions=True,
                    )
                    for i, result in enumerate(results):
                        if isinstance(result, Exception):
                            log.error(
                                "%s Error during tool cleanup hook #%d: %s",
                                self.log_identifier,
                                i,
                                result,
                                exc_info=result,
                            )

                future = asyncio.run_coroutine_threadsafe(
                    run_tool_cleanup(), self._async_loop
                )
                try:
                    future.result(timeout=15)  # Wait for cleanup to complete
                    log.info("%s All tool cleanup hooks executed.", self.log_identifier)
                except Exception as e:
                    log.error(
                        "%s Exception while waiting for tool cleanup hooks to finish: %s",
                        self.log_identifier,
                        e,
                    )
            else:
                log.warning(
                    "%s Cannot execute tool cleanup hooks because the async loop is not running.",
                    self.log_identifier,
                )

        # The base class cleanup() will handle stopping the async loop and joining the thread.
        # We just need to cancel any active tasks before that happens.
        with self.active_tasks_lock:
            if self._async_loop and self._async_loop.is_running():
                for task_context in self.active_tasks.values():
                    task_context.cancel()
            self.active_tasks.clear()
            log.debug("%s Cleared all active tasks.", self.log_identifier)

        super().cleanup()
        log.info("%s Component cleanup finished.", self.log_identifier)
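The `agent_cleanup_function` lookup above expects a module/name pair (plus an optional `base_path`) and calls the resolved function with the component. A hypothetical configuration and matching function, with every concrete value below an assumption; only the keys (`module`, `name`, `base_path`) come from the `.get()` calls in `cleanup()`:

```python
# Hypothetical config values for agent_cleanup_function.
agent_cleanup_function = {
    "module": "my_agent_lifecycle",
    "name": "release_resources",
    "base_path": "/opt/agents/my_agent",
}

def release_resources(component) -> None:
    # Runs synchronously during component shutdown, before tool cleanup hooks.
    conn = component.get_agent_specific_state("db_conn")
    if conn is not None:
        conn.close()
```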
    def set_agent_specific_state(self, key: str, value: Any):
        """
        Sets a key-value pair in the agent-specific state.
        Intended to be used by the custom init_function.
        """
        if not hasattr(self, "agent_specific_state"):
            self.agent_specific_state = {}
        self.agent_specific_state[key] = value
        log.debug("%s Set agent_specific_state['%s']", self.log_identifier, key)

    def get_agent_specific_state(self, key: str, default: Optional[Any] = None) -> Any:
        """
        Gets a value from the agent-specific state.
        Intended to be used by tools and the custom cleanup_function.
        """
        if not hasattr(self, "agent_specific_state"):
            return default
        return self.agent_specific_state.get(key, default)
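A sketch of the intended pairing: an init_function stores shared state that tools (and the cleanup_function sketched earlier) later read back. The init_function signature and the `db_conn` key are illustrative assumptions:

```python
import sqlite3

def init_resources(component, init_config: dict) -> None:
    # Called once at startup; stash a connection under a well-known key.
    conn = sqlite3.connect(init_config.get("db_path", ":memory:"))
    component.set_agent_specific_state("db_conn", conn)

def query_tool(component, sql: str):
    # Any tool can retrieve the connection without re-initializing it.
    conn = component.get_agent_specific_state("db_conn")
    if conn is None:
        return "No database configured."
    return conn.execute(sql).fetchall()
```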
    def get_async_loop(self) -> Optional[asyncio.AbstractEventLoop]:
        """Returns the dedicated asyncio event loop for this component's async tasks."""
        return self._async_loop

    def set_agent_system_instruction_string(self, instruction_string: str) -> None:
        """
        Sets a static string to be injected into the LLM system prompt.
        Called by the agent's init_function.
        """
        if not isinstance(instruction_string, str):
            log.error(
                "%s Invalid type for instruction_string: %s. Must be a string.",
                self.log_identifier,
                type(instruction_string),
            )
            return
        self._agent_system_instruction_string = instruction_string
        self._agent_system_instruction_callback = None
        log.info("%s Static agent system instruction string set.", self.log_identifier)

    def set_agent_system_instruction_callback(
        self,
        callback_function: Callable[[CallbackContext, LlmRequest], Optional[str]],
    ) -> None:
        """
        Sets a callback function to dynamically generate system prompt injections.
        Called by the agent's init_function.
        """
        if not callable(callback_function):
            log.error(
                "%s Invalid type for callback_function: %s. Must be callable.",
                self.log_identifier,
                type(callback_function),
            )
            return
        self._agent_system_instruction_callback = callback_function
        self._agent_system_instruction_string = None
        log.info("%s Agent system instruction callback set.", self.log_identifier)

    def get_gateway_id(self) -> str:
        """
        Returns a unique identifier for this specific gateway/host instance.
        For now, using the agent name, but could be made more robust (e.g., hostname + agent name).
        """
        return self.agent_name
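A hypothetical callback matching the `Callable[[CallbackContext, LlmRequest], Optional[str]]` signature accepted above; treating a `None` return as "nothing to inject" is an assumption consistent with the `Optional` return type:

```python
import datetime
from typing import Optional

def timestamped_instructions(callback_context, llm_request) -> Optional[str]:
    # Recomputed on every LLM request, unlike the static string variant.
    now = datetime.datetime.now(datetime.timezone.utc).isoformat()
    return f"The current UTC time is {now}."

# Inside the agent's init_function:
#     component.set_agent_system_instruction_callback(timestamped_instructions)
```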
    def _check_agent_health(self):
        """
        Checks the health of peer agents and de-registers unresponsive ones.
        This is called periodically by the health check timer.
        Uses TTL-based expiration to determine if an agent is unresponsive.
        """

        log.debug("%s Performing agent health check...", self.log_identifier)

        ttl_seconds = self.agent_discovery_config.get(
            "health_check_ttl_seconds", HEALTH_CHECK_TTL_SECONDS
        )
        health_check_interval = self.agent_discovery_config.get(
            "health_check_interval_seconds", HEALTH_CHECK_INTERVAL_SECONDS
        )

        log.debug(
            "%s Health check configuration: interval=%d seconds, TTL=%d seconds",
            self.log_identifier,
            health_check_interval,
            ttl_seconds,
        )

        # Validate configuration values
        if (
            ttl_seconds <= 0
            or health_check_interval <= 0
            or ttl_seconds < health_check_interval
        ):
            log.error(
                "%s agent_health_check_ttl_seconds (%d) and agent_health_check_interval_seconds (%d) must be positive, and the TTL must be at least the interval.",
                self.log_identifier,
                ttl_seconds,
                health_check_interval,
            )
            raise ValueError(
                f"Invalid health check configuration. agent_health_check_ttl_seconds ({ttl_seconds}) and agent_health_check_interval_seconds ({health_check_interval}) must be positive, and the TTL must be at least the interval."
            )

        # Get all agent names from the registry
        agent_names = self.agent_registry.get_agent_names()
        total_agents = len(agent_names)
        agents_to_deregister = []

        log.debug(
            "%s Checking health of %d peer agents", self.log_identifier, total_agents
        )

        for agent_name in agent_names:
            # Skip our own agent
            if agent_name == self.agent_name:
                continue

            # Check if the agent's TTL has expired
            is_expired, time_since_last_seen = self.agent_registry.check_ttl_expired(
                agent_name, ttl_seconds
            )

            if is_expired:
                log.warning(
                    "%s Agent '%s' TTL has expired. De-registering. Time since last seen: %d seconds (TTL: %d seconds)",
                    self.log_identifier,
                    agent_name,
                    time_since_last_seen,
                    ttl_seconds,
                )
                agents_to_deregister.append(agent_name)

        # De-register unresponsive agents
        for agent_name in agents_to_deregister:
            self._deregister_agent(agent_name)

        log.debug(
            "%s Agent health check completed. Total agents: %d, De-registered: %d",
            self.log_identifier,
            total_agents,
            len(agents_to_deregister),
        )
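For intuition, a minimal sketch of the TTL test that `AgentRegistry.check_ttl_expired` presumably performs; the registry's actual last-seen bookkeeping is not part of this hunk:

```python
import time

def check_ttl_expired(last_seen: dict, agent_name: str, ttl_seconds: int):
    """Return (is_expired, seconds_since_last_seen) for a peer agent."""
    seen_at = last_seen.get(agent_name)
    if seen_at is None:
        return False, 0  # never seen; nothing to expire
    elapsed = int(time.time() - seen_at)
    return elapsed > ttl_seconds, elapsed
```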
    def _deregister_agent(self, agent_name: str):
        """
        De-registers an agent from the registry and publishes a de-registration event.
        """
        # Remove from registry
        registry_removed = self.agent_registry.remove_agent(agent_name)

        # Always remove from peer_agents regardless of registry result
        peer_removed = False
        if agent_name in self.peer_agents:
            del self.peer_agents[agent_name]
            peer_removed = True
            log.info(
                "%s Removed agent '%s' from peer_agents dictionary",
                self.log_identifier,
                agent_name,
            )

        # Publish de-registration event if agent was in either data structure
        if registry_removed or peer_removed:
            try:
                # Create a de-registration event topic
                namespace = self.get_config("namespace")
                deregistration_topic = f"{namespace}/a2a/events/agent/deregistered"

                current_time = time.time()

                # Create the payload
                deregistration_payload = {
                    "event_type": "agent.deregistered",
                    "agent_name": agent_name,
                    "reason": "health_check_failure",
                    "metadata": {
                        "timestamp": current_time,
                        "deregistered_by": self.agent_name,
                    },
                }

                # Publish the event
                self.publish_a2a_message(
                    payload=deregistration_payload, topic=deregistration_topic
                )

                log.info(
                    "%s Published de-registration event for agent '%s' to topic '%s'",
                    self.log_identifier,
                    agent_name,
                    deregistration_topic,
                )
            except Exception as e:
                log.error(
                    "%s Failed to publish de-registration event for agent '%s': %s",
                    self.log_identifier,
                    agent_name,
                    e,
                )
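On the consuming side, a peer subscribed to `{namespace}/a2a/events/agent/deregistered` could react to the payload published above. The handler below is purely a sketch; only the payload keys are taken from `_deregister_agent`:

```python
def on_agent_deregistered(payload: dict) -> None:
    if payload.get("event_type") != "agent.deregistered":
        return
    agent = payload.get("agent_name")
    reason = payload.get("reason", "unknown")
    by = payload.get("metadata", {}).get("deregistered_by")
    print(f"Peer '{agent}' de-registered by '{by}' (reason: {reason}).")
```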
    async def _resolve_early_embeds_and_handle_signals(
        self, raw_text: str, a2a_context: Dict
    ) -> Tuple[str, List[Tuple[int, Any]], str]:
        """
        Resolves early-stage embeds in raw text and extracts signals.
        Returns the resolved text, a list of signals, and any unprocessed tail.
        This is called by process_and_publish_adk_event.
        """
        logical_task_id = a2a_context.get("logical_task_id", "unknown_task")
        method_context_log_identifier = (
            f"{self.log_identifier}[ResolveEmbeds:{logical_task_id}]"
        )
        log.debug(
            "%s Resolving early embeds for text (length: %d).",
            method_context_log_identifier,
            len(raw_text),
        )

        original_session_id = a2a_context.get("session_id")
        user_id = a2a_context.get("user_id")
        adk_app_name = self.get_config("agent_name")

        if not all([self.artifact_service, original_session_id, user_id, adk_app_name]):
            log.error(
                "%s Missing necessary context for embed resolution (artifact_service, session_id, user_id, or adk_app_name). Skipping.",
                method_context_log_identifier,
            )
            return (
                raw_text,
                [],
                "",
            )
        context_for_embeds = {
            "artifact_service": self.artifact_service,
            "session_context": {
                "app_name": adk_app_name,
                "user_id": user_id,
                "session_id": original_session_id,
            },
            "config": {
                "gateway_max_artifact_resolve_size_bytes": self.get_config(
                    "tool_output_llm_return_max_bytes", 4096
                ),
                "gateway_recursive_embed_depth": self.get_config(
                    "gateway_recursive_embed_depth", 12
                ),
            },
        }

        resolver_config = context_for_embeds["config"]

        try:
            from ...common.utils.embeds.constants import EARLY_EMBED_TYPES
            from ...common.utils.embeds.types import ResolutionMode
            from ...common.utils.embeds.resolver import (
                evaluate_embed,
                resolve_embeds_in_string,
            )

            resolved_text, processed_until_index, signals_found = (
                await resolve_embeds_in_string(
                    text=raw_text,
                    context=context_for_embeds,
                    resolver_func=evaluate_embed,
                    types_to_resolve=EARLY_EMBED_TYPES,
                    resolution_mode=ResolutionMode.TOOL_PARAMETER,
                    log_identifier=method_context_log_identifier,
                    config=resolver_config,
                )
            )
            unprocessed_tail = raw_text[processed_until_index:]
            log.debug(
                "%s Embed resolution complete. Resolved text: '%s...', Signals found: %d, Unprocessed tail: '%s...'",
                method_context_log_identifier,
                resolved_text[:100],
                len(signals_found),
                unprocessed_tail[:100],
            )
            return resolved_text, signals_found, unprocessed_tail
        except Exception as e:
            log.exception(
                "%s Error during embed resolution: %s", method_context_log_identifier, e
            )
            return raw_text, [], ""
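The third return value presumably exists because an embed can be split across streaming chunks: the caller would hold the unprocessed tail and prepend it to the next chunk. A sketch of such a driver loop, under the assumption that the caller streams text in pieces:

```python
async def resolve_streamed_text(component, chunks, a2a_context):
    pending = ""
    for chunk in chunks:
        resolved, signals, pending = (
            await component._resolve_early_embeds_and_handle_signals(
                pending + chunk, a2a_context
            )
        )
        yield resolved, signals
    if pending:
        # End of stream: flush whatever never completed as an embed.
        yield pending, []
```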
    def _publish_agent_card(self) -> None:
        """
        Schedules periodic publishing of the agent card based on configuration.
        """
        try:
            publish_interval_sec = self.agent_card_publishing_config.get(
                "interval_seconds"
            )
            if publish_interval_sec and publish_interval_sec > 0:
                log.info(
                    "%s Scheduling agent card publishing every %d seconds.",
                    self.log_identifier,
                    publish_interval_sec,
                )
                # Register timer with callback
                self.add_timer(
                    delay_ms=1000,
                    timer_id=self._card_publish_timer_id,
                    interval_ms=publish_interval_sec * 1000,
                    callback=lambda timer_data: publish_agent_card(self),
                )
            else:
                log.warning(
                    "%s Agent card publishing interval not configured or invalid, card will not be published periodically.",
                    self.log_identifier,
                )
        except Exception as e:
            log.exception(
                "%s Error during _publish_agent_card setup: %s",
                self.log_identifier,
                e,
            )
            raise e
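The cadence is driven entirely by the `interval_seconds` key read above; a hypothetical configuration value and the resulting timer behavior:

```python
# Assumed config shape; only the "interval_seconds" key appears in the code above.
agent_card_publishing = {"interval_seconds": 10}

# With this value, _publish_agent_card registers a repeating timer: the first
# publish fires after ~1 s (delay_ms=1000), then every 10_000 ms thereafter.
```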
    async def _async_setup_and_run(self) -> None:
        """
        Main async logic for the agent component.
        This is called by the base class's `_run_async_operations`.
        """
        try:
            # Call base class to initialize Trust Manager
            await super()._async_setup_and_run()

            # Perform agent-specific async initialization
            await self._perform_async_init()

            self._publish_agent_card()

        except Exception as e:
            log.exception(
                "%s Error during _async_setup_and_run: %s",
                self.log_identifier,
                e,
            )
            self.cleanup()
            raise e
    def _pre_async_cleanup(self) -> None:
        """
        Pre-cleanup actions for the agent component.
        Called by the base class before stopping the async loop.
        """
        # Cleanup Trust Manager if present (ENTERPRISE FEATURE)
        if self.trust_manager:
            try:
                self.trust_manager.cleanup(self.cancel_timer)
            except Exception as e:
                log.error(
                    "%s Error during Trust Manager cleanup: %s", self.log_identifier, e
                )