mcp-proxy-oauth-dcr 0.1.0 (py3-none-any wheel)
- mcp_proxy/__init__.py +89 -0
- mcp_proxy/__main__.py +340 -0
- mcp_proxy/auth/__init__.py +8 -0
- mcp_proxy/auth/manager.py +908 -0
- mcp_proxy/config/__init__.py +8 -0
- mcp_proxy/config/manager.py +200 -0
- mcp_proxy/exceptions.py +186 -0
- mcp_proxy/http/__init__.py +9 -0
- mcp_proxy/http/authenticated_client.py +388 -0
- mcp_proxy/http/client.py +997 -0
- mcp_proxy/logging_config.py +71 -0
- mcp_proxy/models.py +259 -0
- mcp_proxy/protocols.py +122 -0
- mcp_proxy/proxy.py +586 -0
- mcp_proxy/stdio/__init__.py +31 -0
- mcp_proxy/stdio/interface.py +580 -0
- mcp_proxy/stdio/jsonrpc.py +371 -0
- mcp_proxy/translator/__init__.py +11 -0
- mcp_proxy/translator/translator.py +691 -0
- mcp_proxy_oauth_dcr-0.1.0.dist-info/METADATA +167 -0
- mcp_proxy_oauth_dcr-0.1.0.dist-info/RECORD +25 -0
- mcp_proxy_oauth_dcr-0.1.0.dist-info/WHEEL +5 -0
- mcp_proxy_oauth_dcr-0.1.0.dist-info/entry_points.txt +2 -0
- mcp_proxy_oauth_dcr-0.1.0.dist-info/licenses/LICENSE +21 -0
- mcp_proxy_oauth_dcr-0.1.0.dist-info/top_level.txt +1 -0
mcp_proxy/http/client.py
ADDED
@@ -0,0 +1,997 @@
"""HTTP client implementation for MCP Proxy.

This module provides an aiohttp-based HTTP client with support for:
- Standard HTTP requests (POST, GET)
- Server-Sent Events (SSE) streaming
- Session management via MCP-Session-Id headers
- OAuth Bearer token authentication
- Connection pooling for efficient resource utilization
- Exponential backoff retry logic for connection resilience
- Graceful handling of partial message transmission
- Comprehensive error logging and diagnostics
"""

import asyncio
import logging
import random
from dataclasses import dataclass, field
from typing import Optional, AsyncIterator, Dict, Any, Callable, Set, TypeVar, List
from contextlib import asynccontextmanager
from functools import wraps

import aiohttp
from aiohttp import ClientSession, ClientTimeout, TCPConnector

from ..models import HttpMcpRequest, HttpMcpResponse
from ..exceptions import (
    ConnectionError,
    ConnectionTimeoutError,
    StreamError,
    HttpError,
    NetworkError,
)
from ..logging_config import get_logger

logger = get_logger(__name__)

# Type variable for generic retry decorator
T = TypeVar('T')


@dataclass
class RetryConfig:
    """Configuration for retry behavior with exponential backoff.

    Attributes:
        max_retries: Maximum number of retry attempts (default: 3)
        base_delay: Initial delay in seconds before first retry (default: 1.0)
        max_delay: Maximum delay in seconds between retries (default: 30.0)
        exponential_base: Base for exponential backoff calculation (default: 2.0)
        jitter: Whether to add random jitter to delays (default: True)
        retryable_status_codes: HTTP status codes that should trigger a retry
    """
    max_retries: int = 3
    base_delay: float = 1.0
    max_delay: float = 30.0
    exponential_base: float = 2.0
    jitter: bool = True
    retryable_status_codes: Set[int] = field(default_factory=lambda: {
        408,  # Request Timeout
        429,  # Too Many Requests
        500,  # Internal Server Error
        502,  # Bad Gateway
        503,  # Service Unavailable
        504,  # Gateway Timeout
    })

    def calculate_delay(self, attempt: int) -> float:
        """Calculate delay for a given retry attempt using exponential backoff.

        Args:
            attempt: The retry attempt number (0-indexed)

        Returns:
            Delay in seconds before the next retry
        """
        # Calculate exponential delay
        delay = self.base_delay * (self.exponential_base ** attempt)

        # Cap at max delay
        delay = min(delay, self.max_delay)

        # Add jitter if enabled (±25% randomization)
        if self.jitter:
            jitter_range = delay * 0.25
            delay = delay + random.uniform(-jitter_range, jitter_range)
            delay = max(0.1, delay)  # Ensure minimum delay

        return delay
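
# With the defaults above (base_delay=1.0, exponential_base=2.0, jitter on),
# calculate_delay yields roughly 1s, 2s, 4s, 8s for attempts 0-3, each
# randomized by ±25% and capped at max_delay. Illustrative sketch, not part
# of the packaged file:
#
#     cfg = RetryConfig(jitter=False)
#     [cfg.calculate_delay(a) for a in range(4)]                    # [1.0, 2.0, 4.0, 8.0]
#     RetryConfig(jitter=False, max_delay=5.0).calculate_delay(10)  # 5.0 (capped)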


# Exceptions that are considered retryable (transient failures)
RETRYABLE_EXCEPTIONS = (
    ConnectionError,
    ConnectionTimeoutError,
    asyncio.TimeoutError,
    aiohttp.ClientConnectorError,
    aiohttp.ServerDisconnectedError,
    aiohttp.ServerTimeoutError,
    aiohttp.ClientOSError,
)

# Exceptions that should NOT be retried (permanent failures)
NON_RETRYABLE_EXCEPTIONS = (
    aiohttp.InvalidURL,
    aiohttp.ClientSSLError,
    ValueError,
    TypeError,
)


def is_retryable_error(error: Exception, retry_config: RetryConfig) -> bool:
    """Determine if an error is retryable.

    Args:
        error: The exception to check
        retry_config: Retry configuration with retryable status codes

    Returns:
        True if the error is retryable, False otherwise
    """
    # Check for non-retryable exceptions first
    if isinstance(error, NON_RETRYABLE_EXCEPTIONS):
        return False

    # Check for retryable exceptions
    if isinstance(error, RETRYABLE_EXCEPTIONS):
        return True

    # Check for HTTP errors with retryable status codes
    if isinstance(error, HttpError):
        return error.status_code in retry_config.retryable_status_codes

    # Check for aiohttp response errors with status codes
    if isinstance(error, aiohttp.ClientResponseError):
        return error.status in retry_config.retryable_status_codes

    # Default: don't retry unknown errors
    return False
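
# The Callable, TypeVar, and wraps imports at the top of the module suggest a
# decorator-based retry helper, though none is defined in this file. A minimal
# sketch of what one could look like on top of RetryConfig and
# is_retryable_error (illustrative only, not part of the packaged file):
#
#     def with_retry(config: RetryConfig):
#         def decorator(func: Callable[..., T]) -> Callable[..., T]:
#             @wraps(func)
#             async def wrapper(*args, **kwargs):
#                 for attempt in range(config.max_retries + 1):
#                     try:
#                         return await func(*args, **kwargs)
#                     except Exception as e:
#                         if attempt >= config.max_retries or not is_retryable_error(e, config):
#                             raise
#                         await asyncio.sleep(config.calculate_delay(attempt))
#             return wrapper
#         return decorator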


class PartialMessageBuffer:
    """Buffer for handling partial message transmission.

    This class accumulates partial data chunks and provides methods
    to extract complete messages when they are fully received.
    """

    def __init__(self, max_buffer_size: int = 10 * 1024 * 1024):  # 10MB default
        """Initialize the partial message buffer.

        Args:
            max_buffer_size: Maximum buffer size in bytes
        """
        self._buffer: bytes = b""
        self._max_buffer_size = max_buffer_size

    def append(self, data: bytes) -> None:
        """Append data to the buffer.

        Args:
            data: Data bytes to append

        Raises:
            StreamError: If buffer exceeds maximum size
        """
        if len(self._buffer) + len(data) > self._max_buffer_size:
            raise StreamError(
                f"Buffer overflow: exceeded maximum size of {self._max_buffer_size} bytes"
            )
        self._buffer += data

    def extract_complete_messages(self, delimiter: bytes = b"\n\n") -> List[bytes]:
        """Extract complete messages from the buffer.

        Args:
            delimiter: Message delimiter (default: double newline for SSE)

        Returns:
            List of complete messages
        """
        messages = []
        while delimiter in self._buffer:
            message, self._buffer = self._buffer.split(delimiter, 1)
            if message:
                messages.append(message)
        return messages

    def get_remaining(self) -> bytes:
        """Get any remaining data in the buffer.

        Returns:
            Remaining buffer contents
        """
        remaining = self._buffer
        self._buffer = b""
        return remaining

    def clear(self) -> None:
        """Clear the buffer."""
        self._buffer = b""

    @property
    def size(self) -> int:
        """Get current buffer size in bytes."""
        return len(self._buffer)
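
# How the buffer absorbs arbitrary chunk boundaries (illustrative only, not
# part of the packaged file):
#
#     buf = PartialMessageBuffer()
#     buf.append(b"data: one\n\ndata: tw")
#     buf.extract_complete_messages()   # -> [b"data: one"]  ("data: tw" is held)
#     buf.append(b"o\n\n")
#     buf.extract_complete_messages()   # -> [b"data: two"]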


@dataclass
class SSEEvent:
    """Represents a Server-Sent Event."""
    event: str = "message"
    data: str = ""
    id: Optional[str] = None
    retry: Optional[int] = None


class HttpClientImpl:
    """HTTP client for communicating with backend MCP servers.

    This client supports:
    - Standard HTTP requests with automatic retries
    - Server-Sent Events (SSE) streaming for real-time updates
    - Session management via MCP-Session-Id headers
    - OAuth Bearer token authentication
    - Connection pooling and resource management
    - Exponential backoff retry logic for connection resilience
    - Graceful handling of partial message transmission

    Attributes:
        base_url: Base URL for the HTTP MCP server
        timeout: Request timeout in seconds
        max_connections: Maximum number of concurrent connections
        auth_token: OAuth Bearer token for authentication
        session_id: Current MCP session ID
        retry_config: Configuration for retry behavior
    """

    MCP_SESSION_HEADER = "Mcp-Session-Id"
    AUTHORIZATION_HEADER = "Authorization"
    CONTENT_TYPE_HEADER = "Content-Type"
    ACCEPT_HEADER = "Accept"

    SSE_CONTENT_TYPE = "text/event-stream"
    JSON_CONTENT_TYPE = "application/json"

    def __init__(
        self,
        base_url: str,
        timeout: int = 30,
        max_connections: int = 10,
        max_connections_per_host: int = 5,
        retry_config: Optional[RetryConfig] = None,
    ):
        """Initialize the HTTP client.

        Args:
            base_url: Base URL for the HTTP MCP server
            timeout: Request timeout in seconds (default: 30)
            max_connections: Maximum total connections in pool (default: 10)
            max_connections_per_host: Maximum connections per host (default: 5)
            retry_config: Configuration for retry behavior (default: RetryConfig())
        """
        self._base_url = base_url.rstrip("/")
        self._timeout = ClientTimeout(total=timeout)
        self._max_connections = max_connections
        self._max_connections_per_host = max_connections_per_host
        self._retry_config = retry_config or RetryConfig()

        self._auth_token: Optional[str] = None
        self._session_id: Optional[str] = None
        self._session: Optional[ClientSession] = None
        self._connector: Optional[TCPConnector] = None
        self._is_closed = False

        # Connection failure tracking for diagnostics
        self._connection_failures: List[Dict[str, Any]] = []
        self._last_successful_request: Optional[float] = None

        logger.info(
            "HTTP client initialized",
            base_url=self._base_url,
            timeout=timeout,
            max_connections=max_connections,
            max_connections_per_host=max_connections_per_host,
            max_retries=self._retry_config.max_retries,
        )

    @property
    def base_url(self) -> str:
        """Get the base URL."""
        return self._base_url

    @property
    def session_id(self) -> Optional[str]:
        """Get the current session ID."""
        return self._session_id

    @session_id.setter
    def session_id(self, value: Optional[str]) -> None:
        """Set the session ID."""
        self._session_id = value
        logger.debug("Session ID set to: %s", value)

    @property
    def auth_token(self) -> Optional[str]:
        """Get the current auth token."""
        return self._auth_token

    @property
    def retry_config(self) -> RetryConfig:
        """Get the retry configuration."""
        return self._retry_config

    @retry_config.setter
    def retry_config(self, value: RetryConfig) -> None:
        """Set the retry configuration."""
        self._retry_config = value
        logger.debug("Retry config updated: max_retries=%d", value.max_retries)

    @property
    def is_connected(self) -> bool:
        """Check if the client has an active session."""
        return self._session is not None and not self._session.closed

    async def _ensure_session(self) -> ClientSession:
        """Ensure an aiohttp session exists and return it.

        Creates a new session with connection pooling if one doesn't exist.

        Returns:
            Active ClientSession instance

        Raises:
            ConnectionError: If client has been closed
        """
        if self._is_closed:
            raise ConnectionError("HTTP client has been closed")

        if self._session is None or self._session.closed:
            self._connector = TCPConnector(
                limit=self._max_connections,
                limit_per_host=self._max_connections_per_host,
                enable_cleanup_closed=True,
            )
            self._session = ClientSession(
                timeout=self._timeout,
                connector=self._connector,
            )
            logger.debug("Created new aiohttp session with connection pooling")

        return self._session

    def set_auth_token(self, token: str) -> None:
        """Set the OAuth Bearer token for authentication.

        Args:
            token: OAuth access token
        """
        self._auth_token = token
        logger.debug("Auth token updated")

    def clear_auth_token(self) -> None:
        """Clear the current auth token."""
        self._auth_token = None
        logger.debug("Auth token cleared")

    def _build_headers(
        self,
        additional_headers: Optional[Dict[str, str]] = None,
        include_session: bool = True,
        content_type: Optional[str] = None,
        accept: Optional[str] = None,
    ) -> Dict[str, str]:
        """Build request headers with authentication and session info.

        Args:
            additional_headers: Extra headers to include
            include_session: Whether to include session ID header
            content_type: Content-Type header value
            accept: Accept header value

        Returns:
            Dictionary of headers
        """
        headers: Dict[str, str] = {}

        # Add authentication header
        if self._auth_token:
            headers[self.AUTHORIZATION_HEADER] = f"Bearer {self._auth_token}"

        # Add session ID header
        if include_session and self._session_id:
            headers[self.MCP_SESSION_HEADER] = self._session_id

        # Add content type
        if content_type:
            headers[self.CONTENT_TYPE_HEADER] = content_type

        # Add accept header
        if accept:
            headers[self.ACCEPT_HEADER] = accept

        # Merge additional headers
        if additional_headers:
            headers.update(additional_headers)

        return headers
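
    # With an auth token and session ID set, a POST through this client would
    # carry headers like the following (illustrative values):
    #
    #     {
    #         "Authorization": "Bearer <token>",
    #         "Mcp-Session-Id": "abc123",
    #         "Content-Type": "application/json",
    #         "Accept": "application/json, text/event-stream",
    #     }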

    def _extract_session_id(self, response_headers: Dict[str, str]) -> None:
        """Extract and store session ID from response headers.

        Args:
            response_headers: Response headers dictionary
        """
        # Check for session ID in response (case-insensitive)
        for key, value in response_headers.items():
            if key.lower() == self.MCP_SESSION_HEADER.lower():
                if value != self._session_id:
                    self._session_id = value
                    logger.info("Session ID updated from response: %s", value)
                break

    async def send_request(self, request: HttpMcpRequest) -> HttpMcpResponse:
        """Send an HTTP request to the MCP server with retry logic.

        Implements exponential backoff retry for transient failures.

        Args:
            request: HTTP MCP request to send

        Returns:
            HTTP MCP response

        Raises:
            ConnectionError: If connection fails after all retries
            ConnectionTimeoutError: If request times out after all retries
            HttpError: If HTTP error occurs and is not retryable
        """
        return await self._send_request_with_retry(request)

    async def _send_request_with_retry(
        self,
        request: HttpMcpRequest,
        retry_config: Optional[RetryConfig] = None,
    ) -> HttpMcpResponse:
        """Send request with exponential backoff retry logic.

        Args:
            request: HTTP MCP request to send
            retry_config: Optional override for retry configuration

        Returns:
            HTTP MCP response

        Raises:
            ConnectionError: If connection fails after all retries
            ConnectionTimeoutError: If request times out after all retries
            HttpError: If HTTP error occurs and is not retryable
        """
        config = retry_config or self._retry_config
        last_error: Optional[Exception] = None

        for attempt in range(config.max_retries + 1):
            try:
                response = await self._send_request_once(request)

                # Record successful request
                import time
                self._last_successful_request = time.time()

                return response

            except Exception as e:
                last_error = e

                # Record failure for diagnostics
                self._record_connection_failure(e, request, attempt)

                # Check if we should retry
                if attempt >= config.max_retries:
                    logger.error(
                        "Request failed after all retry attempts",
                        attempts=attempt + 1,
                        error=str(e),
                        error_type=type(e).__name__,
                        url=request.url,
                        method=request.method,
                    )
                    raise

                if not is_retryable_error(e, config):
                    logger.error(
                        "Non-retryable error encountered",
                        error=str(e),
                        error_type=type(e).__name__,
                        url=request.url,
                        method=request.method,
                    )
                    raise

                # Calculate delay and wait
                delay = config.calculate_delay(attempt)
                logger.warning(
                    "Request failed, retrying",
                    attempt=attempt + 1,
                    max_attempts=config.max_retries + 1,
                    retry_delay=delay,
                    error=str(e),
                    error_type=type(e).__name__,
                    url=request.url,
                    method=request.method,
                )
                await asyncio.sleep(delay)

        # This should not be reached, but just in case
        if last_error:
            raise last_error
        raise ConnectionError("Request failed with unknown error")
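
    # With the default RetryConfig, a persistently failing request makes up to
    # 4 attempts (1 initial + 3 retries), sleeping roughly 1s, 2s, and 4s
    # (±25% jitter) between them before the final error is re-raised.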

    def _record_connection_failure(
        self,
        error: Exception,
        request: HttpMcpRequest,
        attempt: int
    ) -> None:
        """Record a connection failure for diagnostic tracking.

        Args:
            error: The exception that occurred
            request: The request that failed
            attempt: The attempt number
        """
        import time

        failure_record = {
            "timestamp": time.time(),
            "error_type": type(error).__name__,
            "error_message": str(error),
            "url": request.url,
            "method": request.method,
            "attempt": attempt,
        }
        self._connection_failures.append(failure_record)

        # Keep only recent failures (last 100)
        if len(self._connection_failures) > 100:
            self._connection_failures = self._connection_failures[-100:]

        # Log diagnostic info if many recent failures
        if len(self._connection_failures) >= 10:
            recent_failures = [
                f for f in self._connection_failures
                if time.time() - f["timestamp"] < 300  # Last 5 minutes
            ]
            if len(recent_failures) >= 10:
                self._log_connection_diagnostics(recent_failures)

    async def _send_request_once(self, request: HttpMcpRequest) -> HttpMcpResponse:
        """Send a single HTTP request without retry logic.

        Args:
            request: HTTP MCP request to send

        Returns:
            HTTP MCP response

        Raises:
            ConnectionError: If connection fails
            ConnectionTimeoutError: If request times out
            HttpError: If HTTP error occurs
        """
        session = await self._ensure_session()

        # Build full URL - handle "/" specially to avoid trailing slash issues
        if request.url.startswith("http"):
            url = request.url
        elif request.url == "/" or request.url == "":
            # For root path, use base URL directly (no trailing slash)
            url = self._base_url
        else:
            # For other paths, join properly
            url = f"{self._base_url}{request.url}"

        # Use session ID from request if provided, otherwise use stored one
        if request.session_id:
            self._session_id = request.session_id

        # Build headers - MCP requires Accept header for both JSON and SSE
        headers = self._build_headers(
            additional_headers=request.headers,
            include_session=True,
            content_type=self.JSON_CONTENT_TYPE if request.body else None,
            accept="application/json, text/event-stream",  # MCP requires both
        )

        logger.debug(
            "Sending %s request to %s with session_id=%s",
            request.method, url, self._session_id
        )

        try:
            async with session.request(
                method=request.method,
                url=url,
                headers=headers,
                data=request.body,
            ) as response:
                # Extract session ID from response
                self._extract_session_id(dict(response.headers))

                # Handle partial message transmission gracefully
                body = await self._read_response_body_safely(response)

                http_response = HttpMcpResponse(
                    status=response.status,
                    headers=dict(response.headers),
                    body=body,
                    content_type=response.content_type or self.JSON_CONTENT_TYPE,
                )

                logger.debug(
                    "Received response: status=%d, content_type=%s, body_length=%d",
                    response.status, response.content_type, len(body)
                )

                # Check for retryable HTTP status codes
                if response.status in self._retry_config.retryable_status_codes:
                    raise HttpError(
                        response.status,
                        f"Server returned retryable status code: {response.status}",
                        details={"body": body[:500] if body else None}
                    )

                return http_response

        except asyncio.TimeoutError as e:
            logger.error("Request timeout for %s %s", request.method, url)
            raise ConnectionTimeoutError(f"Request timed out: {url}") from e
        except aiohttp.ClientConnectorError as e:
            logger.error("Connection error for %s %s: %s", request.method, url, e)
            raise ConnectionError(f"Failed to connect to {url}: {e}") from e
        except aiohttp.ServerDisconnectedError as e:
            logger.error("Server disconnected for %s %s: %s", request.method, url, e)
            raise ConnectionError(f"Server disconnected: {url}") from e
        except aiohttp.ClientError as e:
            logger.error("HTTP client error for %s %s: %s", request.method, url, e)
            raise NetworkError(f"HTTP request failed: {e}") from e

    async def _read_response_body_safely(
        self,
        response: aiohttp.ClientResponse,
        max_size: int = 10 * 1024 * 1024,  # 10MB default
    ) -> str:
        """Read response body with graceful handling of partial transmission.

        Args:
            response: aiohttp response object
            max_size: Maximum body size to read in bytes

        Returns:
            Response body as string

        Raises:
            StreamError: If body exceeds max size or cannot be decoded
        """
        buffer = PartialMessageBuffer(max_buffer_size=max_size)

        try:
            async for chunk in response.content.iter_any():
                if chunk:
                    buffer.append(chunk)

            # Get all accumulated data
            data = buffer.get_remaining()

            # Try to decode as UTF-8
            try:
                return data.decode("utf-8")
            except UnicodeDecodeError:
                # Try with error handling
                return data.decode("utf-8", errors="replace")

        except asyncio.TimeoutError:
            # Return partial data on timeout
            logger.warning("Timeout while reading response body, returning partial data")
            data = buffer.get_remaining()
            if data:
                try:
                    return data.decode("utf-8", errors="replace")
                except Exception:
                    return ""
            return ""
        except aiohttp.ClientPayloadError as e:
            # Handle incomplete payload gracefully
            logger.warning("Incomplete payload received: %s", e)
            data = buffer.get_remaining()
            if data:
                try:
                    return data.decode("utf-8", errors="replace")
                except Exception:
                    return ""
            return ""

    async def post(
        self,
        path: str,
        body: str,
        headers: Optional[Dict[str, str]] = None,
    ) -> HttpMcpResponse:
        """Send a POST request to the MCP server.

        Convenience method for sending POST requests.

        Args:
            path: URL path (will be appended to base URL)
            body: Request body (JSON string)
            headers: Additional headers

        Returns:
            HTTP MCP response
        """
        request = HttpMcpRequest(
            method="POST",
            url=path,
            headers=headers or {},
            body=body,
            session_id=self._session_id,
        )
        return await self.send_request(request)

    async def get(
        self,
        path: str,
        headers: Optional[Dict[str, str]] = None,
    ) -> HttpMcpResponse:
        """Send a GET request to the MCP server.

        Convenience method for sending GET requests.

        Args:
            path: URL path (will be appended to base URL)
            headers: Additional headers

        Returns:
            HTTP MCP response
        """
        request = HttpMcpRequest(
            method="GET",
            url=path,
            headers=headers or {},
            session_id=self._session_id,
        )
        return await self.send_request(request)
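
    # Typical use with a JSON-RPC payload (illustrative sketch, not part of
    # the packaged file; the method name is just an example):
    #
    #     import json
    #     resp = await client.post(
    #         "/",
    #         json.dumps({"jsonrpc": "2.0", "id": 1, "method": "tools/list"}),
    #     )
    #     if resp.status == 200:
    #         result = json.loads(resp.body)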

    @staticmethod
    def _parse_sse_event(raw_event: str) -> SSEEvent:
        """Parse a raw SSE event string into an SSEEvent object.

        Args:
            raw_event: Raw event string from SSE stream

        Returns:
            Parsed SSEEvent object
        """
        event = SSEEvent()
        data_lines = []

        for line in raw_event.split("\n"):
            line = line.strip()
            if not line:
                continue

            if line.startswith("event:"):
                event.event = line[6:].strip()
            elif line.startswith("data:"):
                data_lines.append(line[5:].strip())
            elif line.startswith("id:"):
                event.id = line[3:].strip()
            elif line.startswith("retry:"):
                try:
                    event.retry = int(line[6:].strip())
                except ValueError:
                    pass

        event.data = "\n".join(data_lines)
        return event
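
    # For a wire-format event such as the following (illustrative, not part
    # of the packaged file):
    #
    #     raw = 'event: message\nid: 7\ndata: {"jsonrpc": "2.0"}'
    #     evt = HttpClientImpl._parse_sse_event(raw)
    #     # evt.event == "message", evt.id == "7", evt.data == '{"jsonrpc": "2.0"}'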

    async def open_sse_stream(
        self,
        path: str = "",
        headers: Optional[Dict[str, str]] = None,
    ) -> AsyncIterator[SSEEvent]:
        """Open a Server-Sent Events stream.

        Opens a persistent connection to receive server-sent events.
        The stream will yield SSEEvent objects as they arrive.

        Args:
            path: URL path for SSE endpoint (default: base URL)
            headers: Additional headers

        Yields:
            SSEEvent objects as they arrive from the server

        Raises:
            ConnectionError: If connection fails
            StreamError: If stream encounters an error
        """
        session = await self._ensure_session()

        # Build full URL - handle empty/root path specially to avoid trailing slash issues
        if path.startswith("http"):
            url = path
        elif path == "/" or path == "":
            # For root path, use base URL directly (no trailing slash)
            url = self._base_url
        else:
            # For other paths, join properly
            url = f"{self._base_url}{path}"

        # Build headers for SSE
        request_headers = self._build_headers(
            additional_headers=headers,
            include_session=True,
            accept=self.SSE_CONTENT_TYPE,
        )

        logger.info("Opening SSE stream to %s with session_id=%s", url, self._session_id)

        try:
            async with session.get(
                url,
                headers=request_headers,
            ) as response:
                # Verify we got an SSE response
                content_type = response.content_type or ""
                if not content_type.startswith(self.SSE_CONTENT_TYPE):
                    raise StreamError(
                        f"Expected SSE content type, got: {content_type}"
                    )

                # Extract session ID from response
                self._extract_session_id(dict(response.headers))

                logger.debug("SSE stream opened successfully")

                # Buffer for accumulating event data
                buffer = ""

                async for chunk in response.content.iter_any():
                    if not chunk:
                        continue

                    # Decode chunk and add to buffer
                    try:
                        text = chunk.decode("utf-8")
                    except UnicodeDecodeError:
                        logger.warning("Failed to decode SSE chunk, skipping")
                        continue

                    buffer += text

                    # Process complete events (separated by double newlines)
                    while "\n\n" in buffer:
                        event_str, buffer = buffer.split("\n\n", 1)
                        if event_str.strip():
                            sse_event = self._parse_sse_event(event_str)
                            logger.debug(
                                "Received SSE event: type=%s, id=%s, data_length=%d",
                                sse_event.event, sse_event.id, len(sse_event.data)
                            )
                            yield sse_event

                # Process any remaining data in buffer
                if buffer.strip():
                    sse_event = self._parse_sse_event(buffer)
                    if sse_event.data:
                        yield sse_event

        except asyncio.TimeoutError as e:
            logger.error("SSE stream timeout for %s", url)
            raise ConnectionTimeoutError(f"SSE stream timed out: {url}") from e
        except aiohttp.ClientConnectorError as e:
            logger.error("SSE connection error for %s: %s", url, e)
            raise ConnectionError(f"Failed to connect to SSE stream: {url}") from e
        except aiohttp.ClientError as e:
            logger.error("SSE client error for %s: %s", url, e)
            raise StreamError(f"SSE stream error: {e}") from e

    @asynccontextmanager
    async def sse_stream_context(
        self,
        path: str = "",
        headers: Optional[Dict[str, str]] = None,
    ):
        """Context manager for SSE streams.

        Provides a context manager interface for SSE streams with
        automatic cleanup on exit.

        Args:
            path: URL path for SSE endpoint
            headers: Additional headers

        Yields:
            AsyncIterator of SSEEvent objects

        Example:
            async with client.sse_stream_context("/events") as stream:
                async for event in stream:
                    process(event)
        """
        stream = self.open_sse_stream(path, headers)
        try:
            yield stream
        finally:
            # Close the async generator explicitly so the underlying
            # response is released deterministically on exit
            await stream.aclose()

    async def close(self) -> None:
        """Close the HTTP client and release resources.

        Closes the aiohttp session and connector, releasing all
        pooled connections.
        """
        self._is_closed = True

        if self._session and not self._session.closed:
            await self._session.close()
            logger.debug("Closed aiohttp session")

        if self._connector and not self._connector.closed:
            self._connector.close()
            logger.debug("Closed TCP connector")

        self._session = None
        self._connector = None

        logger.info(
            "HTTP client closed",
            total_connection_failures=len(self._connection_failures),
        )

    def _log_connection_diagnostics(self, recent_failures: List[Dict[str, Any]]) -> None:
        """Log diagnostic information for repeated connection failures.

        Args:
            recent_failures: List of recent failure records
        """
        # Analyze failure patterns
        error_types = {}
        urls = {}
        for failure in recent_failures:
            error_type = failure["error_type"]
            url = failure["url"]
            error_types[error_type] = error_types.get(error_type, 0) + 1
            urls[url] = urls.get(url, 0) + 1

        # Get most recent failure
        most_recent = recent_failures[-1] if recent_failures else None

        import time
        time_since_success = None
        if self._last_successful_request:
            time_since_success = time.time() - self._last_successful_request

        logger.error(
            "DIAGNOSTIC: Repeated connection failures detected",
            failure_count=len(recent_failures),
            failure_window_seconds=300,
            error_types=error_types,
            affected_urls=urls,
            most_recent_error=most_recent["error_message"] if most_recent else None,
            most_recent_url=most_recent["url"] if most_recent else None,
            base_url=self._base_url,
            timeout=self._timeout.total,
            max_connections=self._max_connections,
            time_since_last_success=time_since_success,
            diagnostic_suggestions=[
                "Check network connectivity to the MCP server",
                "Verify the MCP server URL is correct and accessible",
                "Check if the MCP server is running and accepting connections",
                "Review firewall rules and network policies",
                "Check DNS resolution for the server hostname",
                "Verify SSL/TLS certificates if using HTTPS",
                "Consider increasing timeout or retry settings",
            ],
        )

    async def __aenter__(self) -> "HttpClientImpl":
        """Async context manager entry."""
        await self._ensure_session()
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
        """Async context manager exit."""
        await self.close()
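
# Putting it together (illustrative sketch, not part of the packaged file;
# the URL, token, and JSON-RPC method are placeholders):
#
#     async def main() -> None:
#         retry = RetryConfig(max_retries=5, base_delay=0.5)
#         async with HttpClientImpl("https://mcp.example.com", retry_config=retry) as client:
#             client.set_auth_token("<access-token>")
#             resp = await client.post("/", '{"jsonrpc": "2.0", "id": 1, "method": "ping"}')
#             print(resp.status, client.session_id)
#
#     asyncio.run(main())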