solace-agent-mesh 1.5.1__py3-none-any.whl → 1.6.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- solace_agent_mesh/agent/adk/callbacks.py +0 -5
- solace_agent_mesh/agent/adk/models/lite_llm.py +123 -8
- solace_agent_mesh/agent/adk/models/oauth2_token_manager.py +245 -0
- solace_agent_mesh/agent/protocol/event_handlers.py +213 -31
- solace_agent_mesh/agent/proxies/__init__.py +0 -0
- solace_agent_mesh/agent/proxies/a2a/__init__.py +3 -0
- solace_agent_mesh/agent/proxies/a2a/app.py +55 -0
- solace_agent_mesh/agent/proxies/a2a/component.py +1115 -0
- solace_agent_mesh/agent/proxies/a2a/config.py +140 -0
- solace_agent_mesh/agent/proxies/a2a/oauth_token_cache.py +104 -0
- solace_agent_mesh/agent/proxies/base/__init__.py +3 -0
- solace_agent_mesh/agent/proxies/base/app.py +99 -0
- solace_agent_mesh/agent/proxies/base/component.py +650 -0
- solace_agent_mesh/agent/proxies/base/config.py +85 -0
- solace_agent_mesh/agent/proxies/base/proxy_task_context.py +17 -0
- solace_agent_mesh/agent/sac/app.py +58 -5
- solace_agent_mesh/agent/sac/component.py +238 -75
- solace_agent_mesh/agent/sac/task_execution_context.py +46 -0
- solace_agent_mesh/agent/tools/audio_tools.py +125 -8
- solace_agent_mesh/agent/tools/web_tools.py +10 -5
- solace_agent_mesh/agent/utils/artifact_helpers.py +141 -3
- solace_agent_mesh/assets/docs/404.html +3 -3
- solace_agent_mesh/assets/docs/assets/js/5c2bd65f.eda4bcb2.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/6ad8f0bd.f4b15f3b.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/71da7b71.38583438.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/77cf947d.48cb18a2.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/924ffdeb.8095e148.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/9e9d0a82.570c057b.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/{ad71b5ed.60668e9e.js → ad71b5ed.af3ecfd1.js} +1 -1
- solace_agent_mesh/assets/docs/assets/js/ceb2a7a6.5d92d7d0.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/{da0b5bad.9d369087.js → da0b5bad.d08a9466.js} +1 -1
- solace_agent_mesh/assets/docs/assets/js/db924877.e98d12a1.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/de915948.27d6b065.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/{e3d9abda.2b916f9e.js → e3d9abda.6b9493d0.js} +1 -1
- solace_agent_mesh/assets/docs/assets/js/e6f9706b.e74a984d.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/f284c35a.42f59cdd.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/ff4d71f2.15b02f97.js +1 -0
- solace_agent_mesh/assets/docs/assets/js/{main.bd3c34f3.js → main.b12eac43.js} +2 -2
- solace_agent_mesh/assets/docs/assets/js/runtime~main.e268214e.js +1 -0
- solace_agent_mesh/assets/docs/docs/documentation/components/agents/index.html +15 -4
- solace_agent_mesh/assets/docs/docs/documentation/components/builtin-tools/artifact-management/index.html +4 -4
- solace_agent_mesh/assets/docs/docs/documentation/components/builtin-tools/audio-tools/index.html +4 -4
- solace_agent_mesh/assets/docs/docs/documentation/components/builtin-tools/data-analysis-tools/index.html +4 -4
- solace_agent_mesh/assets/docs/docs/documentation/components/builtin-tools/embeds/index.html +4 -4
- solace_agent_mesh/assets/docs/docs/documentation/components/builtin-tools/index.html +4 -4
- solace_agent_mesh/assets/docs/docs/documentation/components/cli/index.html +4 -4
- solace_agent_mesh/assets/docs/docs/documentation/components/gateways/index.html +4 -4
- solace_agent_mesh/assets/docs/docs/documentation/components/index.html +4 -4
- solace_agent_mesh/assets/docs/docs/documentation/components/orchestrator/index.html +4 -4
- solace_agent_mesh/assets/docs/docs/documentation/components/plugins/index.html +4 -4
- solace_agent_mesh/assets/docs/docs/documentation/components/proxies/index.html +262 -0
- solace_agent_mesh/assets/docs/docs/documentation/deploying/debugging/index.html +3 -3
- solace_agent_mesh/assets/docs/docs/documentation/deploying/deployment-options/index.html +31 -3
- solace_agent_mesh/assets/docs/docs/documentation/deploying/index.html +3 -3
- solace_agent_mesh/assets/docs/docs/documentation/deploying/observability/index.html +3 -3
- solace_agent_mesh/assets/docs/docs/documentation/developing/create-agents/index.html +4 -4
- solace_agent_mesh/assets/docs/docs/documentation/developing/create-gateways/index.html +5 -5
- solace_agent_mesh/assets/docs/docs/documentation/developing/creating-python-tools/index.html +4 -4
- solace_agent_mesh/assets/docs/docs/documentation/developing/creating-service-providers/index.html +4 -4
- solace_agent_mesh/assets/docs/docs/documentation/developing/evaluations/index.html +135 -0
- solace_agent_mesh/assets/docs/docs/documentation/developing/index.html +6 -4
- solace_agent_mesh/assets/docs/docs/documentation/developing/structure/index.html +4 -4
- solace_agent_mesh/assets/docs/docs/documentation/developing/tutorials/bedrock-agents/index.html +4 -4
- solace_agent_mesh/assets/docs/docs/documentation/developing/tutorials/custom-agent/index.html +4 -4
- solace_agent_mesh/assets/docs/docs/documentation/developing/tutorials/event-mesh-gateway/index.html +5 -5
- solace_agent_mesh/assets/docs/docs/documentation/developing/tutorials/mcp-integration/index.html +4 -4
- solace_agent_mesh/assets/docs/docs/documentation/developing/tutorials/mongodb-integration/index.html +4 -4
- solace_agent_mesh/assets/docs/docs/documentation/developing/tutorials/rag-integration/index.html +4 -4
- solace_agent_mesh/assets/docs/docs/documentation/developing/tutorials/rest-gateway/index.html +4 -4
- solace_agent_mesh/assets/docs/docs/documentation/developing/tutorials/slack-integration/index.html +4 -4
- solace_agent_mesh/assets/docs/docs/documentation/developing/tutorials/sql-database/index.html +4 -4
- solace_agent_mesh/assets/docs/docs/documentation/enterprise/index.html +3 -3
- solace_agent_mesh/assets/docs/docs/documentation/enterprise/installation/index.html +3 -3
- solace_agent_mesh/assets/docs/docs/documentation/enterprise/rbac-setup-guide/index.html +3 -3
- solace_agent_mesh/assets/docs/docs/documentation/enterprise/single-sign-on/index.html +3 -3
- solace_agent_mesh/assets/docs/docs/documentation/getting-started/architecture/index.html +3 -3
- solace_agent_mesh/assets/docs/docs/documentation/getting-started/index.html +3 -3
- solace_agent_mesh/assets/docs/docs/documentation/getting-started/introduction/index.html +3 -3
- solace_agent_mesh/assets/docs/docs/documentation/getting-started/try-agent-mesh/index.html +3 -3
- solace_agent_mesh/assets/docs/docs/documentation/installing-and-configuring/configurations/index.html +6 -5
- solace_agent_mesh/assets/docs/docs/documentation/installing-and-configuring/index.html +3 -3
- solace_agent_mesh/assets/docs/docs/documentation/installing-and-configuring/installation/index.html +3 -3
- solace_agent_mesh/assets/docs/docs/documentation/installing-and-configuring/large_language_models/index.html +100 -3
- solace_agent_mesh/assets/docs/docs/documentation/installing-and-configuring/run-project/index.html +3 -3
- solace_agent_mesh/assets/docs/docs/documentation/migrations/a2a-upgrade/a2a-gateway-upgrade-to-0.3.0/index.html +3 -3
- solace_agent_mesh/assets/docs/docs/documentation/migrations/a2a-upgrade/a2a-technical-migration-map/index.html +3 -3
- solace_agent_mesh/assets/docs/lunr-index-1761248203150.json +1 -0
- solace_agent_mesh/assets/docs/lunr-index.json +1 -1
- solace_agent_mesh/assets/docs/search-doc-1761248203150.json +1 -0
- solace_agent_mesh/assets/docs/search-doc.json +1 -1
- solace_agent_mesh/assets/docs/sitemap.xml +1 -1
- solace_agent_mesh/cli/__init__.py +1 -1
- solace_agent_mesh/cli/commands/add_cmd/agent_cmd.py +2 -69
- solace_agent_mesh/cli/commands/eval_cmd.py +11 -49
- solace_agent_mesh/cli/commands/init_cmd/__init__.py +0 -5
- solace_agent_mesh/cli/commands/init_cmd/env_step.py +10 -12
- solace_agent_mesh/cli/commands/init_cmd/orchestrator_step.py +9 -61
- solace_agent_mesh/cli/commands/init_cmd/webui_gateway_step.py +9 -49
- solace_agent_mesh/cli/commands/plugin_cmd/add_cmd.py +1 -2
- solace_agent_mesh/client/webui/frontend/static/assets/{authCallback-DwrxZE0E.js → authCallback-BTf6dqwp.js} +1 -1
- solace_agent_mesh/client/webui/frontend/static/assets/{client-DarGQzyw.js → client-CaY59VuC.js} +1 -1
- solace_agent_mesh/client/webui/frontend/static/assets/main-B32noGmR.js +342 -0
- solace_agent_mesh/client/webui/frontend/static/assets/main-DHJKSW1S.css +1 -0
- solace_agent_mesh/client/webui/frontend/static/assets/{vendor-BKIeiHj_.js → vendor-BEmvJSYz.js} +1 -1
- solace_agent_mesh/client/webui/frontend/static/auth-callback.html +3 -3
- solace_agent_mesh/client/webui/frontend/static/index.html +4 -4
- solace_agent_mesh/common/a2a/__init__.py +24 -0
- solace_agent_mesh/common/a2a/artifact.py +39 -0
- solace_agent_mesh/common/a2a/events.py +29 -0
- solace_agent_mesh/common/a2a/message.py +68 -0
- solace_agent_mesh/common/a2a/protocol.py +151 -1
- solace_agent_mesh/common/agent_registry.py +83 -3
- solace_agent_mesh/common/constants.py +3 -1
- solace_agent_mesh/common/sac/sam_component_base.py +383 -4
- solace_agent_mesh/common/utils/pydantic_utils.py +12 -0
- solace_agent_mesh/config_portal/backend/common.py +1 -1
- solace_agent_mesh/config_portal/frontend/static/client/assets/_index-ByU1X1HD.js +98 -0
- solace_agent_mesh/config_portal/frontend/static/client/assets/{manifest-44d62be6.js → manifest-61038fc6.js} +1 -1
- solace_agent_mesh/config_portal/frontend/static/client/index.html +1 -1
- solace_agent_mesh/evaluation/evaluator.py +128 -104
- solace_agent_mesh/evaluation/message_organizer.py +116 -110
- solace_agent_mesh/evaluation/report_data_processor.py +84 -86
- solace_agent_mesh/evaluation/report_generator.py +73 -79
- solace_agent_mesh/evaluation/run.py +421 -235
- solace_agent_mesh/evaluation/shared/__init__.py +92 -0
- solace_agent_mesh/evaluation/shared/constants.py +47 -0
- solace_agent_mesh/evaluation/shared/exceptions.py +50 -0
- solace_agent_mesh/evaluation/shared/helpers.py +35 -0
- solace_agent_mesh/evaluation/shared/test_case_loader.py +167 -0
- solace_agent_mesh/evaluation/shared/test_suite_loader.py +280 -0
- solace_agent_mesh/evaluation/subscriber.py +111 -232
- solace_agent_mesh/evaluation/summary_builder.py +227 -117
- solace_agent_mesh/gateway/base/app.py +16 -1
- solace_agent_mesh/gateway/base/component.py +112 -39
- solace_agent_mesh/gateway/http_sse/alembic/versions/20251015_add_session_performance_indexes.py +70 -0
- solace_agent_mesh/gateway/http_sse/component.py +99 -3
- solace_agent_mesh/gateway/http_sse/dependencies.py +4 -4
- solace_agent_mesh/gateway/http_sse/main.py +1 -0
- solace_agent_mesh/gateway/http_sse/repository/chat_task_repository.py +12 -13
- solace_agent_mesh/gateway/http_sse/repository/feedback_repository.py +15 -18
- solace_agent_mesh/gateway/http_sse/repository/interfaces.py +25 -18
- solace_agent_mesh/gateway/http_sse/repository/session_repository.py +30 -26
- solace_agent_mesh/gateway/http_sse/repository/task_repository.py +35 -44
- solace_agent_mesh/gateway/http_sse/routers/agent_cards.py +4 -3
- solace_agent_mesh/gateway/http_sse/routers/artifacts.py +95 -203
- solace_agent_mesh/gateway/http_sse/routers/dto/responses/session_responses.py +4 -3
- solace_agent_mesh/gateway/http_sse/routers/sessions.py +2 -2
- solace_agent_mesh/gateway/http_sse/routers/tasks.py +33 -41
- solace_agent_mesh/gateway/http_sse/routers/users.py +47 -1
- solace_agent_mesh/gateway/http_sse/routers/visualization.py +17 -11
- solace_agent_mesh/gateway/http_sse/services/data_retention_service.py +4 -4
- solace_agent_mesh/gateway/http_sse/services/feedback_service.py +51 -43
- solace_agent_mesh/gateway/http_sse/services/session_service.py +20 -20
- solace_agent_mesh/gateway/http_sse/services/task_logger_service.py +8 -8
- solace_agent_mesh/gateway/http_sse/shared/base_repository.py +45 -71
- solace_agent_mesh/gateway/http_sse/shared/types.py +0 -18
- solace_agent_mesh/templates/gateway_config_template.yaml +0 -5
- solace_agent_mesh/templates/logging_config_template.ini +10 -6
- solace_agent_mesh/templates/plugin_gateway_config_template.yaml +0 -3
- solace_agent_mesh/templates/shared_config.yaml +40 -0
- {solace_agent_mesh-1.5.1.dist-info → solace_agent_mesh-1.6.1.dist-info}/METADATA +47 -21
- {solace_agent_mesh-1.5.1.dist-info → solace_agent_mesh-1.6.1.dist-info}/RECORD +166 -145
- solace_agent_mesh/assets/docs/assets/js/5c2bd65f.e49689dd.js +0 -1
- solace_agent_mesh/assets/docs/assets/js/6ad8f0bd.39d5851d.js +0 -1
- solace_agent_mesh/assets/docs/assets/js/71da7b71.804d6567.js +0 -1
- solace_agent_mesh/assets/docs/assets/js/77cf947d.64c9bd6c.js +0 -1
- solace_agent_mesh/assets/docs/assets/js/9e9d0a82.dd810042.js +0 -1
- solace_agent_mesh/assets/docs/assets/js/db924877.cbc66f02.js +0 -1
- solace_agent_mesh/assets/docs/assets/js/de915948.139b4b9c.js +0 -1
- solace_agent_mesh/assets/docs/assets/js/e6f9706b.582a78ca.js +0 -1
- solace_agent_mesh/assets/docs/assets/js/f284c35a.5766a13d.js +0 -1
- solace_agent_mesh/assets/docs/assets/js/ff4d71f2.9c0297a6.js +0 -1
- solace_agent_mesh/assets/docs/assets/js/runtime~main.18dc45dd.js +0 -1
- solace_agent_mesh/assets/docs/lunr-index-1760121512891.json +0 -1
- solace_agent_mesh/assets/docs/search-doc-1760121512891.json +0 -1
- solace_agent_mesh/client/webui/frontend/static/assets/main-2nd1gbaH.js +0 -339
- solace_agent_mesh/client/webui/frontend/static/assets/main-DoKXctCM.css +0 -1
- solace_agent_mesh/config_portal/frontend/static/client/assets/_index-BNuqpWDc.js +0 -98
- solace_agent_mesh/evaluation/config_loader.py +0 -657
- solace_agent_mesh/evaluation/test_case_loader.py +0 -714
- /solace_agent_mesh/assets/docs/assets/js/{main.bd3c34f3.js.LICENSE.txt → main.b12eac43.js.LICENSE.txt} +0 -0
- {solace_agent_mesh-1.5.1.dist-info → solace_agent_mesh-1.6.1.dist-info}/WHEEL +0 -0
- {solace_agent_mesh-1.5.1.dist-info → solace_agent_mesh-1.6.1.dist-info}/entry_points.txt +0 -0
- {solace_agent_mesh-1.5.1.dist-info → solace_agent_mesh-1.6.1.dist-info}/licenses/LICENSE +0 -0

solace_agent_mesh/agent/proxies/a2a/component.py (new file)
@@ -0,0 +1,1115 @@
+"""
+Concrete implementation of a proxy for standard A2A-over-HTTPS agents.
+"""
+
+from __future__ import annotations
+
+import asyncio
+import uuid
+from typing import Any, Dict, List, Optional, Tuple, TYPE_CHECKING, Union
+from urllib.parse import urlparse
+
+import httpx
+
+from a2a.client import (
+    A2ACardResolver,
+    Client,
+    ClientConfig,
+    ClientFactory,
+    A2AClientHTTPError,
+    AuthInterceptor,
+    InMemoryContextCredentialStore,
+)
+from a2a.client.errors import A2AClientJSONRPCError
+from .oauth_token_cache import OAuth2TokenCache
+from a2a.types import (
+    A2ARequest,
+    AgentCard,
+    Artifact,
+    CancelTaskRequest,
+    Message,
+    SendMessageRequest,
+    SendStreamingMessageRequest,
+    Task,
+    TaskArtifactUpdateEvent,
+    TaskState,
+    TaskStatus,
+    TaskStatusUpdateEvent,
+    TextPart,
+    TransportProtocol,
+)
+
+from solace_ai_connector.common.log import log
+
+from datetime import datetime, timezone
+
+from ....common import a2a
+from ....agent.utils.artifact_helpers import format_artifact_uri
+from ..base.component import BaseProxyComponent
+
+if TYPE_CHECKING:
+    from ..base.proxy_task_context import ProxyTaskContext
+
+info = {
+    "class_name": "A2AProxyComponent",
+    "description": "A proxy for standard A2A-over-HTTPS agents.",
+    "config_parameters": [],
+    "input_schema": {},
+    "output_schema": {},
+}
+
+
+class A2AProxyComponent(BaseProxyComponent):
+    """
+    Concrete proxy component for standard A2A-over-HTTPS agents.
+    """
+
+    def __init__(self, **kwargs: Any):
+        super().__init__(**kwargs)
+        # Cache Client instances per (agent_name, session_id, is_streaming) to ensure
+        # each session gets its own client with session-specific credentials and streaming mode
+        self._a2a_clients: Dict[Tuple[str, str, bool], Client] = {}
+        self._credential_store: InMemoryContextCredentialStore = (
+            InMemoryContextCredentialStore()
+        )
+        self._auth_interceptor: AuthInterceptor = AuthInterceptor(
+            self._credential_store
+        )
+        # OAuth 2.0 token cache for client credentials flow
+        # Why use asyncio.Lock: Ensures thread-safe access to the token cache
+        # when multiple concurrent requests target the same agent
+        self._oauth_token_cache: OAuth2TokenCache = OAuth2TokenCache()
+
+        # Index agent configs by name for O(1) lookup (performance optimization)
+        self._agent_config_by_name: Dict[str, Dict[str, Any]] = {
+            agent["name"]: agent for agent in self.proxied_agents_config
+        }
+
+        # OAuth 2.0 configuration is now validated by Pydantic models at app initialization
+        # No need for separate _validate_oauth_config() method
+
+    def _get_agent_config(self, agent_name: str) -> Optional[Dict[str, Any]]:
+        """
+        O(1) lookup of agent configuration by name.
+
+        Args:
+            agent_name: The name of the agent to look up.
+
+        Returns:
+            The agent configuration dictionary, or None if not found.
+        """
+        return self._agent_config_by_name.get(agent_name)
+
+    async def _fetch_agent_card(
+        self, agent_config: Dict[str, Any]
+    ) -> Optional[AgentCard]:
+        """
+        Fetches the AgentCard from a downstream A2A agent via HTTPS.
+        """
+        agent_name = agent_config.get("name")
+        agent_url = agent_config.get("url")
+        agent_card_path = agent_config.get("agent_card_path", "/agent/card.json")
+        log_identifier = f"{self.log_identifier}[FetchCard:{agent_name}]"
+
+        if not agent_url:
+            log.error("%s No URL configured for agent.", log_identifier)
+            return None
+
+        try:
+            log.info("%s Fetching agent card from %s", log_identifier, agent_url)
+            async with httpx.AsyncClient() as client:
+                resolver = A2ACardResolver(httpx_client=client, base_url=agent_url)
+                agent_card = await resolver.get_agent_card()
+                return agent_card
+        except A2AClientHTTPError as e:
+            log.error(
+                "%s HTTP error fetching agent card from %s: %s",
+                log_identifier,
+                agent_url,
+                e,
+            )
+        except Exception as e:
+            log.exception(
+                "%s Unexpected error fetching agent card from %s: %s",
+                log_identifier,
+                agent_url,
+                e,
+            )
+        return None
+
+    async def _forward_request(
+        self, task_context: ProxyTaskContext, request: A2ARequest, agent_name: str
+    ) -> None:
+        """
+        Forwards an A2A request to a downstream A2A-over-HTTPS agent.
+
+        Implements automatic retry logic for OAuth 2.0 authentication failures.
+        If a 401 Unauthorized response is received and the agent uses OAuth 2.0,
+        the cached token is invalidated and the request is retried once with a
+        fresh token.
+        """
+        log_identifier = (
+            f"{self.log_identifier}[ForwardRequest:{task_context.task_id}:{agent_name}]"
+        )
+
+        # Step 1: Initialize retry counter
+        # Why only retry once: Prevents infinite loops on persistent auth failures.
+        # First 401 may be due to token expiration between cache check and request;
+        # second 401 indicates a configuration or authorization issue (not transient).
+        max_auth_retries: int = 1
+        auth_retry_count: int = 0
+
+        # Step 2: Create while loop for retry logic
+        while auth_retry_count <= max_auth_retries:
+            try:
+                # Get or create A2AClient
+                client = await self._get_or_create_a2a_client(agent_name, task_context)
+                if not client:
+                    raise ValueError(
+                        f"Could not create A2A client for agent '{agent_name}'"
+                    )
+
+                # Create context with sessionId (camelCase!) so AuthInterceptor can look up credentials
+                from a2a.client.middleware import ClientCallContext
+
+                session_id = task_context.a2a_context.get(
+                    "session_id", "default_session"
+                )
+                call_context = ClientCallContext(state={"sessionId": session_id})
+
+                # Forward the request with context
+                if isinstance(
+                    request, (SendStreamingMessageRequest, SendMessageRequest)
+                ):
+                    # Extract the Message from the request params
+                    message_to_send = request.params.message
+
+                    # WORKAROUND: The A2A SDK has a bug in ClientTaskManager that breaks streaming.
+                    # For streaming requests, we bypass the Client.send_message() method and call
+                    # the transport directly to avoid the buggy ClientTaskManager.
+                    # Non-streaming requests work fine with the normal client method.
+                    # TODO: Remove this workaround once SDK bug is fixed upstream.
+                    if task_context.a2a_context.get("is_streaming", True):
+                        # Access transport directly (private API) to bypass ClientTaskManager
+                        log.debug(
+                            "%s Using transport directly for streaming request (SDK bug workaround)",
+                            log_identifier,
+                        )
+                        async for raw_event in client._transport.send_message_streaming(
+                            request.params, context=call_context
+                        ):
+                            # Process raw events directly without ClientTaskManager
+                            await self._process_downstream_response(
+                                raw_event, task_context, client, agent_name
+                            )
+                    else:
+                        # Non-streaming: use normal client method (works fine)
+                        log.debug(
+                            "%s Using normal client method for non-streaming request",
+                            log_identifier,
+                        )
+                        async for event in client.send_message(
+                            message_to_send, context=call_context
+                        ):
+                            await self._process_downstream_response(
+                                event, task_context, client, agent_name
+                            )
+                elif isinstance(request, CancelTaskRequest):
+                    # Forward cancel request to downstream agent
+                    log.info(
+                        "%s Forwarding cancel request for task %s to downstream agent.",
+                        log_identifier,
+                        request.params.id,
+                    )
+                    # Use the modern client's cancel_task method
+                    # Note: Pass the entire params object (TaskIdParams) instead of just the id string
+                    # to work around an SDK bug where it doesn't properly handle string inputs
+                    result = await client.cancel_task(
+                        request.params, context=call_context
+                    )
+                    # Publish the canceled task response
+                    await self._publish_final_response(result, task_context.a2a_context)
+                else:
+                    log.warning(
+                        "%s Unhandled request type for forwarding: %s",
+                        log_identifier,
+                        type(request),
+                    )
+
+                # Step 5: Success - break out of retry loop
+                break
+
+            except RuntimeError as e:
+                # WORKAROUND: The A2A SDK raises StopAsyncIteration for connection failures,
+                # which Python 3.7+ automatically converts to RuntimeError (PEP 479).
+                # We catch this here to provide a more meaningful error message.
+                # This should be fixed upstream in the A2A SDK to raise proper connection exceptions.
+                if "StopAsyncIteration" in str(e):
+                    error_msg = (
+                        f"Failed to connect to agent '{agent_name}': "
+                        "Connection refused or agent unreachable"
+                    )
+                    log.error(
+                        "%s Connection error (SDK raised StopAsyncIteration): %s",
+                        log_identifier,
+                        error_msg,
+                    )
+                    # Raise a more descriptive error that will be caught by the outer handler
+                    raise ConnectionError(error_msg) from e
+                else:
+                    # Some other RuntimeError - re-raise it
+                    raise
+
+            except A2AClientJSONRPCError as e:
+                # Handle JSON-RPC protocol errors
+                log.error(
+                    "%s JSON-RPC error from agent '%s': %s",
+                    log_identifier,
+                    agent_name,
+                    e.error,
+                )
+                # TODO: Publish error response to Solace
+                # Do not retry - this is a protocol-level error
+                raise
+
+            except ConnectionError as e:
+                # Connection errors (including those converted from RuntimeError above)
+                log.error(
+                    "%s Connection error forwarding request to agent '%s': %s",
+                    log_identifier,
+                    agent_name,
+                    e,
+                )
+                raise
+
+            except A2AClientHTTPError as e:
+                # Step 4: Add specific handling for 401 Unauthorized errors
+                # The error might be wrapped in an SSE parsing error, so we need to check
+                # if the underlying cause is a 401
+                is_401_error = False
+
+                # Check if this is directly a 401
+                if hasattr(e, "status_code") and e.status_code == 401:
+                    is_401_error = True
+                # Check if this is an SSE parsing error caused by a 401 response
+                elif "401" in str(e) or "Unauthorized" in str(e):
+                    is_401_error = True
+                # Check if the error message mentions application/json content type
+                # (which is what 401 responses typically return)
+                elif "application/json" in str(e) and "text/event-stream" in str(e):
+                    # This is likely an SSE parsing error caused by a 401 JSON response
+                    is_401_error = True
+
+                if is_401_error and auth_retry_count < max_auth_retries:
+                    log.warning(
+                        "%s Received 401 Unauthorized from agent '%s' (detected from error: %s). Attempting token refresh (retry %d/%d).",
+                        log_identifier,
+                        agent_name,
+                        str(e)[:100],
+                        auth_retry_count + 1,
+                        max_auth_retries,
+                    )
+
+                    should_retry = await self._handle_auth_error(
+                        agent_name, task_context
+                    )
+                    if should_retry:
+                        auth_retry_count += 1
+                        continue  # Retry with fresh token
+
+                # Not a retryable auth error, or max retries exceeded
+                log.exception(
+                    "%s HTTP error forwarding request: %s",
+                    log_identifier,
+                    e,
+                )
+                raise
+
+            except Exception as e:
+                log.exception(
+                    "%s Unexpected error forwarding request: %s",
+                    log_identifier,
+                    e,
+                )
+                # Let base class exception handler in _handle_a2a_request catch this
+                # and publish an error response.
+                raise
+
+    async def _handle_auth_error(
+        self, agent_name: str, task_context: ProxyTaskContext
+    ) -> bool:
+        """
+        Handles authentication errors by invalidating cached tokens and clients.
+
+        This method is called when a 401 Unauthorized response is received from
+        a downstream agent. It checks if the agent uses OAuth 2.0 authentication,
+        and if so, invalidates the cached token and removes ALL cached clients
+        for this agent/session combination (both streaming and non-streaming).
+
+        Args:
+            agent_name: The name of the agent that returned 401.
+            task_context: The current task context.
+
+        Returns:
+            True if token was invalidated and retry should be attempted.
+            False if no retry should be attempted (e.g., static token).
+        """
+        log_identifier = f"{self.log_identifier}[AuthError:{agent_name}]"
+
+        # Step 1: Retrieve agent configuration using O(1) lookup
+        agent_config = self._get_agent_config(agent_name)
+
+        if not agent_config:
+            log.warning(
+                "%s Agent configuration not found. Cannot handle auth error.",
+                log_identifier,
+            )
+            return False
+
+        # Step 2: Check authentication type
+        auth_config = agent_config.get("authentication")
+        if not auth_config:
+            log.debug(
+                "%s No authentication configured for agent. No retry needed.",
+                log_identifier,
+            )
+            return False
+
+        auth_type = auth_config.get("type")
+        if not auth_type:
+            # Legacy config - infer from scheme
+            scheme = auth_config.get("scheme", "bearer")
+            auth_type = "static_bearer" if scheme == "bearer" else "static_apikey"
+
+        if auth_type != "oauth2_client_credentials":
+            log.debug(
+                "%s Agent uses '%s' authentication (not OAuth 2.0). No retry for static tokens.",
+                log_identifier,
+                auth_type,
+            )
+            return False
+
+        # Step 3: Invalidate cached OAuth token
+        log.info(
+            "%s Invalidating cached OAuth 2.0 token for agent '%s'.",
+            log_identifier,
+            agent_name,
+        )
+        await self._oauth_token_cache.invalidate(agent_name)
+
+        # Step 4: Remove ALL cached Clients for this agent/session combination
+        # We clear both streaming and non-streaming clients because:
+        # 1. Both share the same session_id in the credential store
+        # 2. Both would have been created with the same expired token
+        # 3. We want fresh tokens for any subsequent requests
+        # The cache key is a 3-tuple: (agent_name, session_id, is_streaming)
+        session_id = task_context.a2a_context.get("session_id", "default_session")
+
+        clients_removed = 0
+        for is_streaming in [True, False]:
+            cache_key = (agent_name, session_id, is_streaming)
+            if cache_key in self._a2a_clients:
+                self._a2a_clients.pop(cache_key)
+                clients_removed += 1
+                log.info(
+                    "%s Removed cached Client for agent '%s' session '%s' streaming=%s.",
+                    log_identifier,
+                    agent_name,
+                    session_id,
+                    is_streaming,
+                )
+
+        if clients_removed == 0:
+            log.warning(
+                "%s No cached Clients found for agent '%s' session '%s'. This is unexpected.",
+                log_identifier,
+                agent_name,
+                session_id,
+            )
+        else:
+            log.info(
+                "%s Removed %d cached Client(s). Will create fresh client(s) with new token on retry.",
+                log_identifier,
+                clients_removed,
+            )
+
+        # Step 5: Return True to signal retry should be attempted
+        log.info(
+            "%s Auth error handling complete. Retry will be attempted with fresh token.",
+            log_identifier,
+        )
+        return True
+
+    async def _fetch_oauth2_token(
+        self, agent_name: str, auth_config: Dict[str, Any]
+    ) -> str:
+        """
+        Fetches an OAuth 2.0 access token using the client credentials flow.
+
+        This method implements token caching to avoid unnecessary token requests.
+        Tokens are cached per agent and automatically expire based on the configured
+        cache duration (default: 55 minutes).
+
+        Args:
+            agent_name: The name of the agent (used as cache key).
+            auth_config: Authentication configuration dictionary containing:
+                - token_url: OAuth 2.0 token endpoint (required)
+                - client_id: OAuth 2.0 client identifier (required)
+                - client_secret: OAuth 2.0 client secret (required)
+                - scope: (optional) Space-separated scope string
+                - token_cache_duration_seconds: (optional) Cache duration in seconds
+
+        Returns:
+            A valid OAuth 2.0 access token (string).
+
+        Raises:
+            ValueError: If required OAuth parameters are missing or invalid.
+            httpx.HTTPStatusError: If token request returns non-2xx status.
+            httpx.RequestError: If network error occurs.
+        """
+        log_identifier = f"{self.log_identifier}[OAuth2:{agent_name}]"
+
+        # Step 1: Check cache first
+        cached_token = await self._oauth_token_cache.get(agent_name)
+        if cached_token:
+            log.debug("%s Using cached OAuth token.", log_identifier)
+            return cached_token
+
+        # Step 2: Validate required parameters
+        token_url = auth_config.get("token_url")
+        client_id = auth_config.get("client_id")
+        client_secret = auth_config.get("client_secret")
+
+        if not all([token_url, client_id, client_secret]):
+            raise ValueError(
+                f"{log_identifier} OAuth 2.0 client credentials flow requires "
+                "'token_url', 'client_id', and 'client_secret'."
+            )
+
+        # SECURITY: Enforce HTTPS for token URL
+        parsed_url = urlparse(token_url)
+        if parsed_url.scheme != "https":
+            log.error(
+                "%s OAuth 2.0 token_url must use HTTPS for security. Got scheme: '%s'",
+                log_identifier,
+                parsed_url.scheme,
+            )
+            raise ValueError(
+                f"{log_identifier} OAuth 2.0 token_url must use HTTPS for security. "
+                f"Got: {parsed_url.scheme}://"
+            )
+
+        # Step 3: Extract optional parameters
+        scope = auth_config.get("scope", "")
+        # Why 3300 seconds (55 minutes): Provides a 5-minute safety margin before
+        # typical 60-minute token expiration, preventing token expiration mid-request
+        cache_duration = auth_config.get("token_cache_duration_seconds", 3300)
+
+        # Step 4: Log token acquisition attempt
+        # SECURITY: Never log client_secret or access_token to prevent credential leakage
+        log.info(
+            "%s Fetching new OAuth 2.0 token from %s (scope: %s)",
+            log_identifier,
+            token_url,
+            scope or "default",
+        )
+
+        try:
+            # Step 5: Create temporary httpx client with 30-second timeout
+            async with httpx.AsyncClient(timeout=30.0) as client:
+                # Step 6: Execute POST request
+                # SECURITY: client_secret is sent in POST body (not logged or in URL)
+                response = await client.post(
+                    token_url,
+                    data={
+                        "grant_type": "client_credentials",
+                        "client_id": client_id,
+                        "client_secret": client_secret,
+                        "scope": scope,
+                    },
+                    headers={
+                        "Content-Type": "application/x-www-form-urlencoded",
+                        "Accept": "application/json",
+                    },
+                )
+                response.raise_for_status()
+
+                # Step 7: Parse response
+                token_response = response.json()
+                access_token = token_response.get("access_token")
+
+                if not access_token:
+                    raise ValueError(
+                        f"{log_identifier} Token response missing 'access_token' field. "
+                        f"Response keys: {list(token_response.keys())}"
+                    )
+
+                # Step 8: Cache the token
+                await self._oauth_token_cache.set(
+                    agent_name, access_token, cache_duration
+                )
+
+                # Step 9: Log success
+                log.info(
+                    "%s Successfully obtained OAuth 2.0 token (cached for %ds)",
+                    log_identifier,
+                    cache_duration,
+                )
+
+                # Step 10: Return access token
+                return access_token
+
+        except httpx.HTTPStatusError as e:
+            log.error(
+                "%s OAuth 2.0 token request failed with status %d: %s",
+                log_identifier,
+                e.response.status_code,
+                e.response.text,
+            )
+            raise
+        except httpx.RequestError as e:
+            log.error(
+                "%s OAuth 2.0 token request failed: %s",
+                log_identifier,
+                e,
+            )
+            raise
+        except Exception as e:
+            log.exception(
+                "%s Unexpected error fetching OAuth 2.0 token: %s",
+                log_identifier,
+                e,
+            )
+            raise
+
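The `OAuth2TokenCache` imported above is added in this same release (`oauth_token_cache.py`, +104 lines, not shown in this hunk). The component relies only on three awaitable methods — `get`, `set`, and `invalidate` — so a minimal sketch consistent with those calls, assuming TTL expiry on a monotonic clock behind an `asyncio.Lock`, would look like this (a sketch, not the packaged implementation):

    import asyncio
    import time
    from typing import Dict, Optional, Tuple

    class OAuth2TokenCache:
        """Async-safe, per-agent cache of OAuth 2.0 access tokens with TTL expiry."""

        def __init__(self) -> None:
            # agent_name -> (access_token, absolute expiry on the monotonic clock)
            self._tokens: Dict[str, Tuple[str, float]] = {}
            self._lock = asyncio.Lock()  # serializes concurrent access for the same agent

        async def get(self, agent_name: str) -> Optional[str]:
            """Return the cached token, or None if missing or expired."""
            async with self._lock:
                entry = self._tokens.get(agent_name)
                if entry is None:
                    return None
                token, expires_at = entry
                if time.monotonic() >= expires_at:
                    del self._tokens[agent_name]  # expired: force a fresh fetch
                    return None
                return token

        async def set(self, agent_name: str, token: str, duration_seconds: float) -> None:
            """Cache a token for duration_seconds (the component passes 3300 by default)."""
            async with self._lock:
                self._tokens[agent_name] = (token, time.monotonic() + duration_seconds)

        async def invalidate(self, agent_name: str) -> None:
            """Drop the cached token for an agent (called on 401 responses)."""
            async with self._lock:
                self._tokens.pop(agent_name, None)
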
+    async def _get_or_create_a2a_client(
+        self, agent_name: str, task_context: ProxyTaskContext
+    ) -> Optional[Client]:
+        """
+        Gets a cached Client or creates a new one for the given agent, session, and streaming mode.
+
+        Caches clients per (agent_name, session_id, is_streaming) to ensure each session gets its
+        own client with session-specific credentials and the correct streaming mode. This is necessary because:
+        1. The A2A SDK's AuthInterceptor uses session-based credential lookup
+        2. The Client's streaming mode is set at creation time and cannot be changed
+
+        Supports multiple authentication types:
+        - static_bearer: Static bearer token authentication
+        - static_apikey: Static API key authentication
+        - oauth2_client_credentials: OAuth 2.0 Client Credentials flow with automatic token refresh
+
+        For backward compatibility, legacy configurations without a 'type' field
+        will have their type inferred from the 'scheme' field.
+
+        The client's streaming mode is determined by the original request type from
+        the gateway (message/send vs message/stream).
+        """
+        session_id = task_context.a2a_context.get("session_id", "default_session")
+        is_streaming = task_context.a2a_context.get("is_streaming", True)
+        cache_key = (agent_name, session_id, is_streaming)
+
+        if cache_key in self._a2a_clients:
+            return self._a2a_clients[cache_key]
+
+        # Use O(1) lookup for agent configuration
+        agent_config = self._get_agent_config(agent_name)
+        if not agent_config:
+            log.error(f"No configuration found for proxied agent '{agent_name}'")
+            return None
+
+        agent_card = self.agent_registry.get_agent(agent_name)
+        if not agent_card:
+            log.error(f"Agent card not found for '{agent_name}' in registry.")
+            return None
+
+        # Resolve timeout - ensure we always have a valid timeout value
+        default_timeout = self.get_config("default_request_timeout_seconds", 300)
+        agent_timeout = agent_config.get("request_timeout_seconds")
+        if agent_timeout is None:
+            agent_timeout = default_timeout
+        log.info("Using timeout of %ss for agent '%s'.", agent_timeout, agent_name)
+
+        # Create a new httpx client with the specific timeout for this agent
+        # httpx.Timeout requires explicit values for connect, read, write, and pool
+        httpx_client_for_agent = httpx.AsyncClient(
+            timeout=httpx.Timeout(
+                connect=agent_timeout,
+                read=agent_timeout,
+                write=agent_timeout,
+                pool=agent_timeout,
+            )
+        )
+
+        # Setup authentication if configured
+        auth_config = agent_config.get("authentication")
+        if auth_config:
+            auth_type = auth_config.get("type")
+
+            # Determine auth type (with backward compatibility)
+            if not auth_type:
+                # Legacy config: infer type from 'scheme' field
+                scheme = auth_config.get("scheme", "bearer")
+                if scheme == "bearer":
+                    auth_type = "static_bearer"
+                elif scheme == "apikey":
+                    auth_type = "static_apikey"
+                else:
+                    raise ValueError(
+                        f"Unknown legacy authentication scheme '{scheme}' for agent '{agent_name}'. "
+                        f"Supported schemes: 'bearer', 'apikey'."
+                    )
+
+                log.warning(
+                    "%s Using legacy authentication config for agent '%s'. "
+                    "Consider migrating to 'type' field.",
+                    self.log_identifier,
+                    agent_name,
+                )
+
+            log.info(
+                "%s Configuring authentication type '%s' for agent '%s'",
+                self.log_identifier,
+                auth_type,
+                agent_name,
+            )
+
+            # Route to appropriate handler
+            if auth_type == "static_bearer":
+                token = auth_config.get("token")
+                if not token:
+                    raise ValueError(
+                        f"Authentication type 'static_bearer' requires 'token' for agent '{agent_name}'"
+                    )
+                await self._credential_store.set_credentials(
+                    session_id, "bearer", token
+                )
+
+            elif auth_type == "static_apikey":
+                token = auth_config.get("token")
+                if not token:
+                    raise ValueError(
+                        f"Authentication type 'static_apikey' requires 'token' for agent '{agent_name}'"
+                    )
+                await self._credential_store.set_credentials(
+                    session_id, "apikey", token
+                )
+
+            elif auth_type == "oauth2_client_credentials":
+                # NEW: OAuth 2.0 Client Credentials Flow
+                try:
+                    access_token = await self._fetch_oauth2_token(
+                        agent_name, auth_config
+                    )
+                    await self._credential_store.set_credentials(
+                        session_id, "bearer", access_token
+                    )
+                except Exception as e:
+                    log.error(
+                        "%s Failed to obtain OAuth 2.0 token for agent '%s': %s",
+                        self.log_identifier,
+                        agent_name,
+                        e,
+                    )
+                    raise
+
+            else:
+                raise ValueError(
+                    f"Unsupported authentication type '{auth_type}' for agent '{agent_name}'. "
+                    f"Supported types: static_bearer, static_apikey, oauth2_client_credentials."
+                )
+
+        # Create ClientConfig for the modern client
+        # Use the streaming mode from the original request
+        config = ClientConfig(
+            streaming=is_streaming,
+            polling=False,
+            httpx_client=httpx_client_for_agent,
+            supported_transports=[TransportProtocol.jsonrpc],
+            accepted_output_modes=[],
+        )
+
+        # Create client using ClientFactory
+        factory = ClientFactory(config)
+        client = factory.create(
+            agent_card,
+            consumers=None,
+            interceptors=[self._auth_interceptor],
+        )
+
+        self._a2a_clients[cache_key] = client
+        return client
+
+    async def _handle_outbound_artifacts(
+        self,
+        response: Any,
+        task_context: ProxyTaskContext,
+        agent_name: str,
+    ) -> List[Dict[str, Any]]:
+        """
+        Finds artifacts with byte content, saves them to the proxy's artifact store,
+        and mutates the response object to replace bytes with a URI.
+        It also uses TextParts within an artifact as a description for the saved file.
+
+        Returns:
+            A list of dictionaries, each representing a saved artifact with its filename and version.
+        """
+        from ....agent.utils.artifact_helpers import save_artifact_with_metadata
+
+        log_identifier = (
+            f"{self.log_identifier}[HandleOutboundArtifacts:{task_context.task_id}]"
+        )
+        saved_artifacts_manifest = []
+
+        artifacts_to_process: List[Artifact] = []
+        if isinstance(response, Task) and response.artifacts:
+            artifacts_to_process = response.artifacts
+        elif isinstance(response, TaskArtifactUpdateEvent):
+            artifacts_to_process = [response.artifact]
+
+        if not artifacts_to_process:
+            return saved_artifacts_manifest
+
+        if not self.artifact_service:
+            log.warning(
+                "%s Artifact service not configured. Cannot save outbound artifacts.",
+                log_identifier,
+            )
+            return saved_artifacts_manifest
+
+        for artifact in artifacts_to_process:
+            contextual_description = "\n".join(
+                [
+                    a2a.get_text_from_text_part(part.root)
+                    for part in artifact.parts
+                    if a2a.is_text_part(part)
+                ]
+            )
+
+            for i, part_container in enumerate(artifact.parts):
+                part = part_container.root
+                if (
+                    a2a.is_file_part(part_container)
+                    and a2a.is_file_part_bytes(part)
+                    and a2a.get_bytes_from_file_part(part)
+                ):
+                    file_part = part
+                    file_content = file_part.file
+                    log.info(
+                        "%s Found outbound artifact '%s' with byte content. Saving...",
+                        log_identifier,
+                        file_content.name,
+                    )
+
+                    metadata_to_save = artifact.metadata or {}
+                    if artifact.description:
+                        metadata_to_save["description"] = artifact.description
+                    elif contextual_description:
+                        metadata_to_save["description"] = contextual_description
+                    else:
+                        metadata_to_save["description"] = (
+                            f"Artifact created by {agent_name}"
+                        )
+
+                    metadata_to_save["proxied_from_artifact_id"] = artifact.artifact_id
+                    user_id = task_context.a2a_context.get("user_id", "default_user")
+                    session_id = task_context.a2a_context.get("session_id")
+
+                    # Get file content using facade helpers
+                    content_bytes = a2a.get_bytes_from_file_part(file_part)
+                    filename = a2a.get_filename_from_file_part(file_part)
+                    mime_type = a2a.get_mimetype_from_file_part(file_part)
+
+                    save_result = await save_artifact_with_metadata(
+                        artifact_service=self.artifact_service,
+                        app_name=agent_name,
+                        user_id=user_id,
+                        session_id=session_id,
+                        filename=filename,
+                        content_bytes=content_bytes,
+                        mime_type=mime_type,
+                        metadata_dict=metadata_to_save,
+                        timestamp=datetime.now(timezone.utc),
+                    )
+
+                    if save_result.get("status") in ["success", "partial_success"]:
+                        data_version = save_result.get("data_version")
+                        saved_uri = format_artifact_uri(
+                            app_name=agent_name,
+                            user_id=user_id,
+                            session_id=session_id,
+                            filename=filename,
+                            version=data_version,
+                        )
+
+                        new_file_part = a2a.create_file_part_from_uri(
+                            uri=saved_uri,
+                            name=filename,
+                            mime_type=mime_type,
+                            metadata=a2a.get_metadata_from_part(file_part),
+                        )
+                        from a2a.types import Part
+
+                        artifact.parts[i] = Part(root=new_file_part)
+
+                        saved_artifacts_manifest.append(
+                            {"filename": filename, "version": data_version}
+                        )
+                        log.info(
+                            "%s Saved artifact '%s' as version %d. URI: %s",
+                            log_identifier,
+                            filename,
+                            data_version,
+                            saved_uri,
+                        )
+                    else:
+                        log.error(
+                            "%s Failed to save artifact '%s': %s",
+                            log_identifier,
+                            filename,
+                            save_result.get("message"),
+                        )
+
+        return saved_artifacts_manifest
+
+    async def _process_downstream_response(
+        self,
+        event: Union[
+            tuple, Message, Task, TaskStatusUpdateEvent, TaskArtifactUpdateEvent
+        ],
+        task_context: ProxyTaskContext,
+        client: Client,
+        agent_name: str,
+    ) -> None:
+        """
+        Processes a single event from the downstream agent.
+
+        When using the normal client (non-streaming), events are:
+        - A ClientEvent tuple: (Task, Optional[UpdateEvent])
+        - A Message object (for direct responses)
+
+        When using transport directly (streaming workaround), events are raw:
+        - Task, TaskStatusUpdateEvent, TaskArtifactUpdateEvent, or Message objects
+        """
+        log_identifier = (
+            f"{self.log_identifier}[ProcessResponse:{task_context.task_id}]"
+        )
+
+        # Use facade helpers to determine event type
+        event_payload = None
+
+        # Handle raw transport events (from streaming workaround)
+        if isinstance(event, (Task, TaskStatusUpdateEvent, TaskArtifactUpdateEvent)):
+            event_payload = event
+            log.debug(
+                "%s Received raw transport event: %s",
+                log_identifier,
+                type(event).__name__,
+            )
+        elif a2a.is_client_event(event):
+            # Unpack the ClientEvent tuple
+            task, update_event = a2a.unpack_client_event(event)
+            # If there's an update event, that's what we should process
+            # The task is just context; the update is the actual event
+            if update_event is not None:
+                event_payload = update_event
+                log.debug(
+                    "%s Received ClientEvent with update: %s (task state: %s)",
+                    log_identifier,
+                    type(update_event).__name__,
+                    task.status.state if task.status else "unknown",
+                )
+            else:
+                # No update event means this is the final task state
+                event_payload = task
+                log.debug(
+                    "%s Received ClientEvent with final task state: %s",
+                    log_identifier,
+                    task.status.state if task.status else "unknown",
+                )
+        elif a2a.is_message_object(event):
+            # Direct Message response
+            event_payload = event
+            log.debug(
+                "%s Received direct Message response",
+                log_identifier,
+            )
+        else:
+            log.warning(
+                "%s Received unexpected event type: %s",
+                log_identifier,
+                type(event).__name__,
+            )
+            return
+
+        if not event_payload:
+            log.warning(
+                "%s Received an event with no processable payload: %s",
+                log_identifier,
+                event,
+            )
+            return
+
+        produced_artifacts = await self._handle_outbound_artifacts(
+            event_payload, task_context, agent_name
+        )
+
+        # Add produced_artifacts to metadata if any artifacts were processed
+        if produced_artifacts and isinstance(
+            event_payload, (Task, TaskStatusUpdateEvent)
+        ):
+            if not event_payload.metadata:
+                event_payload.metadata = {}
+            event_payload.metadata["produced_artifacts"] = produced_artifacts
+            log.info(
+                "%s Added manifest of %d produced artifacts to %s metadata.",
+                log_identifier,
+                len(produced_artifacts),
+                type(event_payload).__name__,
+            )
+
+        # Add agent_name to metadata for all response types
+        if isinstance(
+            event_payload, (Task, TaskStatusUpdateEvent, TaskArtifactUpdateEvent)
+        ):
+            if not event_payload.metadata:
+                event_payload.metadata = {}
+            event_payload.metadata["agent_name"] = agent_name
+            log.debug(
+                "%s Added agent_name '%s' to %s metadata.",
+                log_identifier,
+                agent_name,
+                type(event_payload).__name__,
+            )
+
+        original_task_id = task_context.task_id
+        if hasattr(event_payload, "task_id") and event_payload.task_id:
+            event_payload.task_id = original_task_id
+        elif hasattr(event_payload, "id") and event_payload.id:
+            event_payload.id = original_task_id
+
+        if isinstance(event_payload, Task) and event_payload.artifacts:
+            text_only_artifacts_content = []
+            remaining_artifacts = []
+            for artifact in event_payload.artifacts:
+                if a2a.is_text_only_artifact(artifact):
+                    text_only_artifacts_content.extend(
+                        a2a.get_text_content_from_artifact(artifact)
+                    )
+                else:
+                    remaining_artifacts.append(artifact)
+
+            if text_only_artifacts_content:
+                log.info(
+                    "%s Consolidating %d text-only artifacts into status message.",
+                    log_identifier,
+                    len(event_payload.artifacts) - len(remaining_artifacts),
+                )
+                event_payload.artifacts = (
+                    remaining_artifacts if remaining_artifacts else None
+                )
+
+                consolidated_text = "\n".join(text_only_artifacts_content)
+                summary_message_part = TextPart(
+                    text=(
+                        "The following text-only artifacts were returned and have been consolidated into this message:\n\n---\n\n"
+                        f"{consolidated_text}"
+                    )
+                )
+
+                if not event_payload.status.message:
+                    from a2a.types import Part
+
+                    event_payload.status.message = Message(
+                        message_id=str(uuid.uuid4()),
+                        role="agent",
+                        parts=[Part(root=summary_message_part)],
+                    )
+                else:
+                    from a2a.types import Part
+
+                    event_payload.status.message.parts.append(
+                        Part(root=summary_message_part)
+                    )
+
+        if isinstance(event_payload, (Task, TaskStatusUpdateEvent)):
+            if isinstance(event_payload, Task):
+                await self._publish_final_response(
+                    event_payload, task_context.a2a_context
+                )
+            else:
+                await self._publish_status_update(
+                    event_payload, task_context.a2a_context
+                )
+        elif isinstance(event_payload, TaskArtifactUpdateEvent):
+            await self._publish_artifact_update(event_payload, task_context.a2a_context)
+        elif isinstance(event_payload, Message):
+            log.info(
+                "%s Received a direct Message response. Wrapping in a completed Task.",
+                log_identifier,
+            )
+            final_task = Task(
+                id=task_context.task_id,
+                context_id=task_context.a2a_context.get("session_id"),
+                status=TaskStatus(state=TaskState.completed, message=event_payload),
+            )
+
+            # Add produced_artifacts metadata to the wrapped Task if any artifacts were processed
+            if produced_artifacts:
+                final_task.metadata = {"produced_artifacts": produced_artifacts}
+                log.info(
+                    "%s Added manifest of %d produced artifacts to wrapped Task metadata.",
+                    log_identifier,
+                    len(produced_artifacts),
+                )
+
+            await self._publish_final_response(final_task, task_context.a2a_context)
+        else:
+            log.warning(
+                f"Received unhandled response payload type: {type(event_payload)}"
+            )
+
+    def clear_client_cache(self):
+        """
+        Clears all cached A2A clients and OAuth tokens.
+        This is useful for testing when authentication configuration changes.
+        """
+        num_clients = len(self._a2a_clients)
+        self._a2a_clients.clear()
+        log.info(
+            "%s Cleared all cached A2A clients (%d clients removed).",
+            self.log_identifier,
+            num_clients,
+        )
+
+    def cleanup(self):
+        """Cleans up resources on component shutdown."""
+        log.info("%s Cleaning up A2A proxy component resources...", self.log_identifier)
+
+        # Token cache cleanup:
+        # - OAuth2TokenCache is automatically garbage collected
+        # - No persistent state to clean up
+        # - Tokens are lost on component restart (by design)
+
+        async def _async_cleanup():
+            # Close all created clients using public API
+            for cache_key, client in self._a2a_clients.items():
+                agent_name, session_id, _is_streaming = cache_key  # key is (agent_name, session_id, is_streaming)
+                log.info(
+                    "%s Closing client for agent '%s' session '%s'",
+                    self.log_identifier,
+                    agent_name,
+                    session_id,
+                )
+                await client.close()
+            self._a2a_clients.clear()
+
+        if self._async_loop and self._async_loop.is_running():
+            future = asyncio.run_coroutine_threadsafe(
+                _async_cleanup(), self._async_loop
+            )
+            try:
+                future.result(timeout=5)
+            except Exception as e:
+                log.error("%s Error during async cleanup: %s", self.log_identifier, e)
+
+        super().cleanup()
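
Taken together, the configuration reads in `_get_or_create_a2a_client`, `_fetch_oauth2_token`, and `_fetch_agent_card` imply that each entry in `proxied_agents_config` is a dict shaped roughly as below. This is a reconstructed example for orientation only: the agent name, URL, and secret values are placeholders, and any key not read by the code above is not guaranteed to exist.

    # Hypothetical entry; field names mirror the .get() calls in the component.
    example_proxied_agent = {
        "name": "billing-agent",
        "url": "https://billing.example.com",
        "agent_card_path": "/agent/card.json",  # optional; this is the default
        "request_timeout_seconds": 120,  # optional; falls back to default_request_timeout_seconds (300)
        "authentication": {
            "type": "oauth2_client_credentials",
            "token_url": "https://auth.example.com/oauth2/token",  # must be HTTPS
            "client_id": "my-client-id",
            "client_secret": "my-client-secret",  # never logged by the component
            "scope": "agent.read agent.write",  # optional
            "token_cache_duration_seconds": 3300,  # optional; default is 55 minutes
        },
    }

    # Static alternatives accepted by the same code path:
    #   {"type": "static_bearer", "token": "..."}  -> stored as a "bearer" credential
    #   {"type": "static_apikey", "token": "..."}  -> stored as an "apikey" credential
    # Legacy entries may carry {"scheme": "bearer" | "apikey"} instead of "type".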