dexscreen 0.0.2-py3-none-any.whl → 0.0.5-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dexscreen/__init__.py +87 -0
- dexscreen/api/client.py +275 -42
- dexscreen/core/exceptions.py +1067 -0
- dexscreen/core/http.py +859 -117
- dexscreen/core/validators.py +542 -0
- dexscreen/stream/polling.py +288 -78
- dexscreen/utils/__init__.py +54 -1
- dexscreen/utils/filters.py +182 -12
- dexscreen/utils/logging_config.py +421 -0
- dexscreen/utils/middleware.py +363 -0
- dexscreen/utils/ratelimit.py +212 -8
- dexscreen/utils/retry.py +357 -0
- {dexscreen-0.0.2.dist-info → dexscreen-0.0.5.dist-info}/METADATA +52 -1
- dexscreen-0.0.5.dist-info/RECORD +22 -0
- dexscreen-0.0.2.dist-info/RECORD +0 -17
- {dexscreen-0.0.2.dist-info → dexscreen-0.0.5.dist-info}/WHEEL +0 -0
- {dexscreen-0.0.2.dist-info → dexscreen-0.0.5.dist-info}/licenses/LICENSE +0 -0
dexscreen/core/http.py
CHANGED
```diff
@@ -1,9 +1,10 @@
 """
-Enhanced with
+Enhanced HTTP client with structured logging and error context preservation
 """
 
 import asyncio
 import contextlib
+import time
 from datetime import datetime, timedelta
 from enum import Enum
 from threading import Lock
@@ -13,7 +14,16 @@ import orjson
 from curl_cffi.requests import AsyncSession, Session
 
 from ..utils.browser_selector import get_random_browser
+from ..utils.logging_config import generate_correlation_id, get_contextual_logger, with_correlation_id
 from ..utils.ratelimit import RateLimiter
+from ..utils.retry import RetryConfig, RetryManager, RetryPresets
+from .exceptions import (
+    HttpConnectionError,
+    HttpRequestError,
+    HttpResponseParsingError,
+    HttpSessionError,
+    HttpTimeoutError,
+)
 
 # Type alias for HTTP methods
 HttpMethod = Literal["GET", "POST", "PUT", "DELETE", "OPTIONS", "HEAD", "TRACE"]
@@ -36,6 +46,8 @@ class HttpClientCffi:
     - Zero-downtime configuration updates
     - Graceful session switching
    - Automatic connection warm-up
+    - Enhanced structured logging with correlation IDs
+    - Request/response tracking and error context preservation
     """
 
     def __init__(
@@ -45,6 +57,7 @@ class HttpClientCffi:
         base_url: str = "https://api.dexscreener.com/",
         client_kwargs: Optional[dict[str, Any]] = None,
         warmup_url: str = "/latest/dex/tokens/solana?limit=1",
+        retry_config: Optional[RetryConfig] = None,
     ):
         """
         Initialize HTTP client with rate limiting and browser impersonation.
@@ -57,17 +70,27 @@ class HttpClientCffi:
                 Common options include:
                 - impersonate: Browser to impersonate (default: "realworld")
                 - proxies: Proxy configuration
-                - timeout: Request timeout
+                - timeout: Request timeout (default: 10 seconds)
                 - headers: Additional headers
                 - verify: SSL verification
             warmup_url: URL path for warming up new sessions
+            retry_config: Retry configuration for network operations.
+                If None, uses default API-optimized retry settings.
         """
         self._limiter = RateLimiter(calls, period)
         self.base_url = base_url
         self.warmup_url = warmup_url
 
+        # Setup retry configuration
+        self.retry_config = retry_config or RetryPresets.api_calls()
+
         # Setup client kwargs with defaults
         self.client_kwargs = client_kwargs or {}
+
+        # Set default timeout if not specified
+        if "timeout" not in self.client_kwargs:
+            self.client_kwargs["timeout"] = 10
+
         # Use our custom realworld browser selection if not specified
         if "impersonate" not in self.client_kwargs:
             self.client_kwargs["impersonate"] = get_random_browser()
@@ -93,43 +116,161 @@ class HttpClientCffi:
         # Async lock for session switching
         self._switch_lock = asyncio.Lock()
 
-        #
+        # Enhanced statistics with timing data
         self._stats = {
             "switches": 0,
             "failed_requests": 0,
             "successful_requests": 0,
             "last_switch": None,
+            "retry_attempts": 0,
+            "retry_successes": 0,
+            "retry_failures": 0,
+            "total_requests": 0,
+            "average_response_time": 0.0,
+            "min_response_time": float("inf"),
+            "max_response_time": 0.0,
         }
 
+        # Enhanced logging
+        self.logger = get_contextual_logger(__name__)
+
     def _create_absolute_url(self, relative: str) -> str:
         base = self.base_url.rstrip("/")
         relative = relative.lstrip("/")
         return f"{base}/{relative}"
 
+    def _update_response_time_stats(self, duration: float):
+        """Update response time statistics"""
+        with self._lock:
+            self._stats["total_requests"] += 1
+            # Update running average response time
+            total_requests = self._stats["total_requests"]
+            current_avg = self._stats["average_response_time"]
+            self._stats["average_response_time"] = (current_avg * (total_requests - 1) + duration) / total_requests
+            # Update min/max
+            self._stats["min_response_time"] = min(self._stats["min_response_time"], duration)
+            self._stats["max_response_time"] = max(self._stats["max_response_time"], duration)
+
+    def _parse_json_response(
+        self,
+        response: Any,
+        method: str,
+        url: str,
+        context: dict[str, Any]
+    ) -> Union[list, dict, None]:
+        """Parse JSON response with proper error handling and logging"""
+        content_type = response.headers.get("content-type", "")
+
+        if "application/json" not in content_type:
+            # Non-JSON response
+            content_preview = (
+                response.content[:200].decode("utf-8", errors="replace")
+                if response.content else ""
+            )
+
+            parse_context = context.copy()
+            parse_context.update({
+                "expected_json": True,
+                "received_content_type": content_type,
+                "content_preview": content_preview,
+            })
+
+            self.logger.warning("Received non-JSON response when JSON expected", context=parse_context)
+
+            raise HttpResponseParsingError(
+                method,
+                url,
+                content_type,
+                content_preview,
+                original_error=Exception(f"Expected JSON response but got {content_type}")
+            )
+
+        try:
+            return orjson.loads(response.content)
+        except Exception as e:
+            content_preview = (
+                response.content[:200].decode("utf-8", errors="replace")
+                if response.content else ""
+            )
+
+            parse_context = context.copy()
+            parse_context.update({
+                "parse_error": str(e),
+                "content_preview": content_preview,
+            })
+
+            self.logger.error(
+                "Failed to parse JSON response: %s",
+                str(e),
+                context=parse_context,
+                exc_info=True
+            )
+
+            raise HttpResponseParsingError(
+                method, url, content_type, content_preview, original_error=e
+            ) from e
+
     async def _ensure_active_session(self) -> AsyncSession:
         """Ensure there's an active session"""
         async with self._switch_lock:
-            #
-            if self.
-
-
-
+            # Create primary session if it doesn't exist
+            if self._primary_session is None:
+                session_context = {
+                    "operation": "create_session",
+                    "session_type": "primary_async",
+                    "browser": self.client_kwargs.get("impersonate", "unknown"),
+                }
+
+                self.logger.debug("Creating new async session", context=session_context)
+
                 try:
-
-
-
-
-
-
-
-
-
-
-
-
-
+                    self._primary_session = AsyncSession(**self.client_kwargs)
+
+                    # Warm up connection
+                    warmup_start = time.time()
+                    try:
+                        warmup_url = self._create_absolute_url(self.warmup_url)
+                        response = await self._primary_session.get(warmup_url)
+                        if response.status_code == 200:
+                            warmup_duration = time.time() - warmup_start
+
+                            session_context.update(
+                                {
+                                    "warmup_success": True,
+                                    "warmup_time_ms": round(warmup_duration * 1000, 2),
+                                    "warmup_status": response.status_code,
+                                }
+                            )
+
+                            self.logger.debug("Session warmup successful", context=session_context)
+                    except Exception as e:
+                        warmup_duration = time.time() - warmup_start
+                        session_context.update(
+                            {
+                                "warmup_success": False,
+                                "warmup_time_ms": round(warmup_duration * 1000, 2),
+                                "warmup_error": str(e),
+                            }
+                        )
+
+                        self.logger.warning("Session warmup failed", context=session_context)
+
+                    # Always activate the session (warmup is optional)
                     self._primary_state = SessionState.ACTIVE
 
+                except Exception as e:
+                    session_context.update(
+                        {
+                            "creation_error": str(e),
+                            "error_type": type(e).__name__,
+                        }
+                    )
+
+                    self.logger.error(
+                        "Failed to create async session: %s", str(e), context=session_context, exc_info=True
+                    )
+                    raise
+
         if self._primary_session is None:
             raise RuntimeError("Failed to create primary session")
         return self._primary_session
@@ -138,22 +279,76 @@ class HttpClientCffi:
         """Ensure there's a sync session"""
         with self._lock:
             if self._sync_primary is None:
-
-
+                session_context = {
+                    "operation": "create_session",
+                    "session_type": "primary_sync",
+                    "browser": self.client_kwargs.get("impersonate", "unknown"),
+                }
+
+                self.logger.debug("Creating new sync session", context=session_context)
+
                 try:
-
-
-                    #
-
-
-
-
+                    self._sync_primary = Session(**self.client_kwargs)
+
+                    # Warm up
+                    warmup_start = time.time()
+                    try:
+                        warmup_url = self._create_absolute_url(self.warmup_url)
+                        response = self._sync_primary.get(warmup_url)
+                        warmup_duration = time.time() - warmup_start
+
+                        if response.status_code == 200:
+                            session_context.update(
+                                {
+                                    "warmup_success": True,
+                                    "warmup_time_ms": round(warmup_duration * 1000, 2),
+                                    "warmup_status": response.status_code,
+                                }
+                            )
+
+                            self.logger.debug("Sync session warmup successful", context=session_context)
+                        else:
+                            session_context.update(
+                                {
+                                    "warmup_success": False,
+                                    "warmup_time_ms": round(warmup_duration * 1000, 2),
+                                    "warmup_status": response.status_code,
+                                }
+                            )
+
+                            self.logger.warning("Sync session warmup returned non-200", context=session_context)
+
+                    except Exception as e:
+                        warmup_duration = time.time() - warmup_start
+                        session_context.update(
+                            {
+                                "warmup_success": False,
+                                "warmup_time_ms": round(warmup_duration * 1000, 2),
+                                "warmup_error": str(e),
+                            }
+                        )
+
+                        self.logger.warning("Sync session warmup failed", context=session_context)
+
+                except Exception as e:
+                    session_context.update(
+                        {
+                            "creation_error": str(e),
+                            "error_type": type(e).__name__,
+                        }
+                    )
+
+                    self.logger.error(
+                        "Failed to create sync session: %s", str(e), context=session_context, exc_info=True
+                    )
+                    raise
 
         return self._sync_primary
 
+    @with_correlation_id()
     def request(self, method: HttpMethod, url: str, **kwargs) -> Union[list, dict, None]:
         """
-        Synchronous request with rate limiting and browser impersonation.
+        Synchronous request with rate limiting, retry logic, and browser impersonation.
 
         Args:
             method: HTTP method (GET, POST, etc.)
@@ -162,30 +357,180 @@ class HttpClientCffi:
 
         Returns:
             Parsed JSON response
+
+        Raises:
+            HttpConnectionError: When unable to establish connection (after retries)
+            HttpTimeoutError: When request times out (after retries)
+            HttpRequestError: When request fails with HTTP error status (after retries)
+            HttpResponseParsingError: When response parsing fails
+            HttpSessionError: When session creation fails
         """
         url = self._create_absolute_url(url)
+        retry_manager = RetryManager(self.retry_config)
+        request_start = time.time()
+
+        request_context = {
+            "method": method,
+            "url": url,
+            "has_kwargs": bool(kwargs),
+            "request_id": generate_correlation_id()[:8],
+            "session_type": "sync",
+        }
+
+        self.logger.debug("Starting sync HTTP request", context=request_context)
 
         with self._limiter:
+            # Try session creation first
             try:
-                # Use persistent session
                 session = self._ensure_sync_session()
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            except Exception as e:
+                error_context = request_context.copy()
+                error_context.update(
+                    {
+                        "error_type": type(e).__name__,
+                        "error_message": str(e),
+                    }
+                )
+
+                self.logger.error("Failed to create sync session: %s", str(e), context=error_context, exc_info=True)
+                raise HttpSessionError("Failed to create or access sync session", original_error=e) from e
+
+            while True:
+                try:
+                    response = session.request(method, url, **kwargs)  # type: ignore
+                    response.raise_for_status()
+
+                    request_duration = time.time() - request_start
+                    self._update_response_time_stats(request_duration)
+
+                    # Track success
+                    with self._lock:
+                        self._stats["successful_requests"] += 1
+                        if retry_manager.attempt > 0:
+                            self._stats["retry_successes"] += 1
+
+                    # Log successful response
+                    response_context = request_context.copy()
+                    response_context.update(
+                        {
+                            "status_code": response.status_code,
+                            "response_time_ms": round(request_duration * 1000, 2),
+                            "content_type": response.headers.get("content-type", "unknown"),
+                            "content_length": len(response.content) if response.content else 0,
+                            "retry_attempt": retry_manager.attempt,
+                            "success": True,
+                        }
+                    )
+
+                    self.logger.debug("Sync HTTP request completed successfully", context=response_context)
+
+                    # Parse JSON response
+                    return self._parse_json_response(response, method, url, response_context)
+
+                except HttpResponseParsingError:
+                    # Re-raise parsing errors immediately (not retryable)
+                    raise
+                except Exception as e:
+                    request_duration = time.time() - request_start
+
+                    with self._lock:
+                        self._stats["failed_requests"] += 1
+                        if retry_manager.attempt > 0:
+                            self._stats["retry_attempts"] += 1
+
+                    # Create error context
+                    error_context = request_context.copy()
+                    error_context.update(
+                        {
+                            "error_type": type(e).__name__,
+                            "error_message": str(e),
+                            "response_time_ms": round(request_duration * 1000, 2),
+                            "retry_attempt": retry_manager.attempt,
+                        }
+                    )
+
+                    # Add response details if available
+                    if hasattr(e, "response"):
+                        response = e.response  # type: ignore
+                        if response is not None:
+                            error_context.update(
+                                {
+                                    "status_code": response.status_code,
+                                    "response_headers": dict(response.headers),
+                                }
+                            )
+
+                    retry_manager.record_failure(e)
+
+                    if retry_manager.should_retry(e):
+                        retry_context = error_context.copy()
+                        retry_context.update(
+                            {
+                                "will_retry": True,
+                                "max_retries": self.retry_config.max_retries,
+                                "retry_delay_ms": round(retry_manager.calculate_delay() * 1000, 2),
+                            }
+                        )
+
+                        self.logger.warning(
+                            "Retrying sync request %s %s (attempt %d/%d): %s",
+                            method,
+                            url,
+                            retry_manager.attempt,
+                            self.retry_config.max_retries + 1,
+                            str(e),
+                            context=retry_context,
+                        )
+                        retry_manager.wait_sync()
+                        continue
+                    else:
+                        # Not retryable or max retries exceeded - classify and raise final error
+                        final_error_context = error_context.copy()
+                        final_error_context.update(
+                            {
+                                "final_failure": True,
+                                "total_retry_attempts": retry_manager.attempt,
+                                "is_retryable": retry_manager.should_retry(e)
+                                if retry_manager.attempt < self.retry_config.max_retries
+                                else False,
+                            }
+                        )
+
+                        with self._lock:
+                            if retry_manager.attempt > 0:
+                                self._stats["retry_failures"] += 1
+
+                        self.logger.error(
+                            "Sync HTTP request failed permanently: %s",
+                            str(e),
+                            context=final_error_context,
+                            exc_info=True,
+                        )
+
+                        # Classify the error type for final exception
+                        error_msg = str(e).lower()
+                        if "timeout" in error_msg or "timed out" in error_msg:
+                            # Extract timeout value if available from kwargs
+                            timeout = kwargs.get("timeout", "unknown")
+                            raise HttpTimeoutError(method, url, timeout, original_error=e) from e
+                        elif "connection" in error_msg or "resolve" in error_msg or "network" in error_msg:
+                            raise HttpConnectionError(method, url, original_error=e) from e
+                        else:
+                            # Get status code if available
+                            status_code = None
+                            response_text = None
+                            if hasattr(e, "response"):
+                                response = e.response  # type: ignore
+                                if response and hasattr(response, "status_code"):
+                                    status_code = response.status_code
+                                if response and hasattr(response, "content"):
+                                    response_text = response.content[:200].decode("utf-8", errors="replace")
+                            raise HttpRequestError(method, url, status_code, response_text, original_error=e) from e
+
+    @with_correlation_id()
     async def request_async(self, method: HttpMethod, url: str, **kwargs) -> Union[list, dict, None]:
         """
-        Asynchronous request with rate limiting and browser impersonation.
+        Asynchronous request with rate limiting, retry logic, and browser impersonation.
 
         Args:
             method: HTTP method (GET, POST, etc.)
@@ -194,51 +539,245 @@ class HttpClientCffi:
 
         Returns:
             Parsed JSON response
+
+        Raises:
+            HttpConnectionError: When unable to establish connection (after retries)
+            HttpTimeoutError: When request times out (after retries)
+            HttpRequestError: When request fails with HTTP error status (after retries)
+            HttpResponseParsingError: When response parsing fails
+            HttpSessionError: When session creation fails
         """
         url = self._create_absolute_url(url)
+        retry_manager = RetryManager(self.retry_config)
+        request_start = time.time()
+
+        request_context = {
+            "method": method,
+            "url": url,
+            "has_kwargs": bool(kwargs),
+            "request_id": generate_correlation_id()[:8],
+            "session_type": "async",
+        }
 
-        async
-        # Get active session
-        session = await self._ensure_active_session()
-
-        # Track active requests
-        with self._lock:
-            self._primary_requests += 1
+        self.logger.debug("Starting async HTTP request", context=request_context)
 
-
-
-
-
-
+        async with self._limiter:
+            while True:
+                # Get active session for each attempt
+                try:
+                    session = await self._ensure_active_session()
+                except Exception as e:
+                    error_context = request_context.copy()
+                    error_context.update(
+                        {
+                            "error_type": type(e).__name__,
+                            "error_message": str(e),
+                        }
+                    )
+
+                    self.logger.error(
+                        "Failed to create async session: %s", str(e), context=error_context, exc_info=True
+                    )
+                    raise HttpSessionError("Failed to create or access async session", original_error=e) from e
+
+                # Track active requests
                 with self._lock:
-                    self.
-
-        # Parse response
-        content_type = response.headers.get("content-type", "")
-        if "application/json" in content_type:
-            # Use orjson for better performance
-            return orjson.loads(response.content)
-        else:
-            return None
+                    self._primary_requests += 1
 
-
-
-
+                try:
+                    response = await session.request(method, url, **kwargs)  # type: ignore
+                    response.raise_for_status()
+
+                    request_duration = time.time() - request_start
+                    self._update_response_time_stats(request_duration)
+
+                    # Track success
+                    with self._lock:
+                        self._stats["successful_requests"] += 1
+                        if retry_manager.attempt > 0:
+                            self._stats["retry_successes"] += 1
+
+                    # Log successful response
+                    response_context = request_context.copy()
+                    response_context.update(
+                        {
+                            "status_code": response.status_code,
+                            "response_time_ms": round(request_duration * 1000, 2),
+                            "content_type": response.headers.get("content-type", "unknown"),
+                            "content_length": len(response.content) if response.content else 0,
+                            "retry_attempt": retry_manager.attempt,
+                            "session_state": self._primary_state.value,
+                            "success": True,
+                        }
+                    )
+
+                    self.logger.debug("Async HTTP request completed successfully", context=response_context)
+
+                    # Parse JSON response
+                    return self._parse_json_response(response, method, url, response_context)
+
+                except HttpResponseParsingError:
+                    # Re-raise parsing errors immediately (not retryable)
+                    with self._lock:
+                        self._stats["failed_requests"] += 1
+                    raise
+                except Exception as e:
+                    request_duration = time.time() - request_start
+
+                    with self._lock:
+                        self._stats["failed_requests"] += 1
+                        if retry_manager.attempt > 0:
+                            self._stats["retry_attempts"] += 1
+
+                    # Create error context
+                    error_context = request_context.copy()
+                    error_context.update(
+                        {
+                            "error_type": type(e).__name__,
+                            "error_message": str(e),
+                            "response_time_ms": round(request_duration * 1000, 2),
+                            "retry_attempt": retry_manager.attempt,
+                            "session_state": self._primary_state.value,
+                        }
+                    )
+
+                    # Add response details if available
+                    if hasattr(e, "response"):
+                        response = e.response  # type: ignore
+                        if response is not None:
+                            error_context.update(
+                                {
+                                    "status_code": response.status_code,
+                                    "response_headers": dict(response.headers),
+                                }
+                            )
+
+                    self.logger.error("Async HTTP request failed: %s", str(e), context=error_context, exc_info=True)
+
+                    retry_manager.record_failure(e)
+
+                    if retry_manager.should_retry(e):
+                        retry_context = error_context.copy()
+                        retry_context.update(
+                            {
+                                "will_retry": True,
+                                "max_retries": self.retry_config.max_retries,
+                                "retry_delay_ms": round(retry_manager.calculate_delay() * 1000, 2),
+                            }
+                        )
+
+                        self.logger.warning(
+                            "Retrying async request %s %s (attempt %d/%d): %s",
+                            method,
+                            url,
+                            retry_manager.attempt,
+                            self.retry_config.max_retries + 1,
+                            str(e),
+                            context=retry_context,
+                        )
+
+                        # Decrease request count before waiting
+                        with self._lock:
+                            self._primary_requests -= 1
+
+                        await retry_manager.wait_async()
+                        continue
+                    else:
+                        # Not retryable or max retries exceeded
+                        final_error_context = error_context.copy()
+                        final_error_context.update(
+                            {
+                                "final_failure": True,
+                                "total_retry_attempts": retry_manager.attempt,
+                                "is_retryable": retry_manager.should_retry(e)
+                                if retry_manager.attempt < self.retry_config.max_retries
+                                else False,
+                            }
+                        )
+
+                        with self._lock:
+                            if retry_manager.attempt > 0:
+                                self._stats["retry_failures"] += 1
+
+                        self.logger.error(
+                            "Async HTTP request failed permanently: %s",
+                            str(e),
+                            context=final_error_context,
+                            exc_info=True,
+                        )
+
+                        # Try failover to secondary session if available (only on final failure)
+                        if self._secondary_state == SessionState.ACTIVE:
+                            self.logger.info("Attempting failover to secondary session", context=final_error_context)
+                            try:
+                                # Decrease primary request count before failover
+                                with self._lock:
+                                    self._primary_requests -= 1
+
+                                return await self._failover_request(method, url, **kwargs)
+                            except Exception as failover_error:
+                                failover_context = final_error_context.copy()
+                                failover_context.update(
+                                    {
+                                        "failover_error_type": type(failover_error).__name__,
+                                        "failover_error_message": str(failover_error),
+                                    }
+                                )
+
+                                self.logger.error(
+                                    "Failover attempt also failed: %s",
+                                    str(failover_error),
+                                    context=failover_context,
+                                    exc_info=True,
+                                )
+
+                                # If failover also fails, raise the original error with failover context
+                                raise self._classify_async_error(method, url, e, kwargs) from e
+
+                        # No failover available or didn't work, classify the error
+                        raise self._classify_async_error(method, url, e, kwargs) from e
+
+                finally:
+                    # Decrease request count (only if we're not retrying)
+                    if not (retry_manager.last_exception and retry_manager.should_retry(retry_manager.last_exception)):
+                        with self._lock:
+                            self._primary_requests -= 1
+
+    def _classify_async_error(self, method: str, url: str, error: Exception, kwargs: dict) -> Exception:
+        """Classify an async error into appropriate HTTP exception type"""
+        error_msg = str(error).lower()
+        if "timeout" in error_msg or "timed out" in error_msg:
+            # Extract timeout value if available from kwargs
+            timeout = kwargs.get("timeout", "unknown")
+            return HttpTimeoutError(method, url, timeout, original_error=error)
+        elif "connection" in error_msg or "resolve" in error_msg or "network" in error_msg:
+            return HttpConnectionError(method, url, original_error=error)
+        else:
+            # Get status code if available
+            status_code = None
+            response_text = None
+            if hasattr(error, "response"):
+                response = error.response  # type: ignore
+                if response and hasattr(response, "status_code"):
+                    status_code = response.status_code
+                if response and hasattr(response, "content"):
+                    response_text = response.content[:200].decode("utf-8", errors="replace")
+            return HttpRequestError(method, url, status_code, response_text, original_error=error)
 
-
-
-
+    async def _failover_request(self, method: HttpMethod, url: str, **kwargs) -> Union[list, dict, None]:
+        """Failover to secondary session - raises exceptions instead of returning None"""
+        if self._secondary_session and self._secondary_state == SessionState.ACTIVE:
+            failover_start = time.time()
 
-
+            failover_context = {
+                "method": method,
+                "url": url,
+                "failover_attempt": True,
+                "request_id": generate_correlation_id()[:8],
+            }
 
-
-        # Decrease request count
-        with self._lock:
-            self._primary_requests -= 1
+            self.logger.info("Executing async failover request", context=failover_context)
 
-    async def _failover_request(self, method: HttpMethod, url: str, **kwargs) -> Union[list, dict, None]:
-        """Failover to secondary session"""
-        if self._secondary_session and self._secondary_state == SessionState.ACTIVE:
            try:
                 with self._lock:
                     self._secondary_requests += 1
@@ -246,21 +785,61 @@ class HttpClientCffi:
                 response = await self._secondary_session.request(method, url, **kwargs)  # type: ignore
                 response.raise_for_status()
 
-
-
-
-
-
-
-
+                failover_duration = time.time() - failover_start
+
+                failover_context.update(
+                    {
+                        "status_code": response.status_code,
+                        "response_time_ms": round(failover_duration * 1000, 2),
+                        "content_type": response.headers.get("content-type", "unknown"),
+                        "success": True,
+                    }
+                )
+
+                self.logger.info("Async failover request succeeded", context=failover_context)
+
+                # Parse JSON response
+                return self._parse_json_response(response, method, url, failover_context)
+
+            except HttpResponseParsingError:
+                # Re-raise our custom parsing errors as-is
+                raise
+            except Exception as e:
+                failover_duration = time.time() - failover_start
+
+                error_context = failover_context.copy()
+                error_context.update(
+                    {
+                        "error_type": type(e).__name__,
+                        "error_message": str(e),
+                        "response_time_ms": round(failover_duration * 1000, 2),
+                        "success": False,
+                    }
+                )
+
+                self.logger.error("Async failover request failed: %s", str(e), context=error_context, exc_info=True)
+
+                # Classify and raise the failover error
+                raise self._classify_async_error(method, url, e, kwargs) from e
             finally:
                 with self._lock:
                     self._secondary_requests -= 1
 
-
+        # No secondary session available
+        raise HttpSessionError("No secondary session available for failover")
 
     async def _perform_switch(self):
         """Perform hot switch between sessions"""
+        switch_context = {
+            "operation": "session_switch",
+            "primary_state": self._primary_state.value,
+            "secondary_state": self._secondary_state.value,
+            "primary_requests": self._primary_requests,
+            "secondary_requests": self._secondary_requests,
+        }
+
+        self.logger.info("Starting session switch", context=switch_context)
+
         # 1. Promote secondary to active
         self._secondary_state = SessionState.ACTIVE
 
@@ -280,25 +859,60 @@ class HttpClientCffi:
         self._secondary_requests = old_primary_requests
         self._secondary_state = SessionState.DRAINING
 
+        switch_context.update(
+            {
+                "switch_completed": True,
+                "new_primary_state": self._primary_state.value,
+                "new_secondary_state": self._secondary_state.value,
+            }
+        )
+
+        self.logger.info("Session switch completed", context=switch_context)
+
         # 4. Async cleanup of old session
         if old_primary:
             asyncio.create_task(self._graceful_close_session(old_primary, lambda: self._secondary_requests))
 
     async def _graceful_close_session(self, session: AsyncSession, get_request_count):
         """Gracefully close session after requests complete"""
+        close_context = {
+            "operation": "graceful_close",
+            "initial_request_count": get_request_count(),
+        }
+
+        self.logger.debug("Starting graceful session close", context=close_context)
+
         # Wait for ongoing requests to complete (max 30 seconds)
         start_time = datetime.now()
         timeout = timedelta(seconds=30)
 
         while get_request_count() > 0:
             if datetime.now() - start_time > timeout:
+                close_context.update(
+                    {
+                        "timeout_reached": True,
+                        "remaining_requests": get_request_count(),
+                    }
+                )
+
+                self.logger.warning("Session close timeout reached", context=close_context)
                 break
 
             await asyncio.sleep(0.1)
 
         # Close session
-
+        try:
             await session.close()
+            close_context.update({"close_successful": True})
+            self.logger.debug("Session closed successfully", context=close_context)
+        except Exception as e:
+            close_context.update(
+                {
+                    "close_successful": False,
+                    "close_error": str(e),
+                }
+            )
+            self.logger.warning("Error during session close", context=close_context)
 
     def set_impersonate(self, browser: str):
         """
@@ -315,6 +929,14 @@ class HttpClientCffi:
         - "firefox133", "firefox135", etc.: Specific Firefox versions
         Note: "realworld" is replaced by our custom browser selector
         """
+        config_context = {
+            "operation": "set_impersonate",
+            "old_browser": self.client_kwargs.get("impersonate", "unknown"),
+            "new_browser": browser,
+        }
+
+        self.logger.info("Updating browser impersonation", context=config_context)
+
         # Update client kwargs for future sessions
         with self._lock:
             self.client_kwargs["impersonate"] = browser
@@ -328,6 +950,14 @@ class HttpClientCffi:
             new_kwargs: New configuration options
             replace: If True, replace entire config. If False (default), merge with existing.
         """
+        config_context = {
+            "operation": "config_update",
+            "replace_mode": replace,
+            "new_config_keys": list(new_kwargs.keys()),
+        }
+
+        self.logger.info("Starting configuration update", context=config_context)
+
         # Don't lock here - we want requests to continue
         # Prepare new config
         if replace:
@@ -338,27 +968,75 @@ class HttpClientCffi:
             config = self.client_kwargs.copy()
             config.update(new_kwargs)
 
-        #
-        if "
-            config.pop("proxy", None)
+        # Remove proxy if explicitly set to None
+        if "proxies" in new_kwargs and new_kwargs["proxies"] is None:
             config.pop("proxies", None)
 
         if "impersonate" not in config:
             config["impersonate"] = get_random_browser()
 
         # Create new session (secondary) without blocking
-
+        try:
+            new_session = AsyncSession(**config)
+
+            self.logger.debug("Created new session for config update", context=config_context)
+        except Exception as e:
+            error_context = config_context.copy()
+            error_context.update(
+                {
+                    "error_type": type(e).__name__,
+                    "error_message": str(e),
+                }
+            )
+
+            self.logger.error(
+                "Failed to create new session during config update: %s", str(e), context=error_context, exc_info=True
+            )
+            return
 
         # Warm up new connection in background
+        warmup_start = time.time()
         warmup_success = False
         try:
            warmup_url = self._create_absolute_url(self.warmup_url)
            response = await new_session.get(warmup_url)
+            warmup_duration = time.time() - warmup_start
+
             # Only consider warmup successful if we get 200 OK
             if response.status_code == 200:
                 warmup_success = True
-
-
+
+                warmup_context = config_context.copy()
+                warmup_context.update(
+                    {
+                        "warmup_success": True,
+                        "warmup_time_ms": round(warmup_duration * 1000, 2),
+                        "warmup_status": response.status_code,
+                    }
+                )
+
+                self.logger.debug("Session warmup successful during config update", context=warmup_context)
+            else:
+                self.logger.warning(
+                    "Session warmup returned non-200 status during config update: %d",
+                    response.status_code,
+                    context=config_context,
+                )
+
+        except Exception as e:
+            warmup_duration = time.time() - warmup_start
+
+            warmup_context = config_context.copy()
+            warmup_context.update(
+                {
+                    "warmup_success": False,
+                    "warmup_time_ms": round(warmup_duration * 1000, 2),
+                    "error_type": type(e).__name__,
+                    "error_message": str(e),
+                }
+            )
+
+            self.logger.warning("Session warmup failed during config update: %s", str(e), context=warmup_context)
 
         # Only proceed with switch if warmup was successful
         if warmup_success:
@@ -378,11 +1056,23 @@ class HttpClientCffi:
             with self._lock:
                 self._stats["switches"] += 1
                 self._stats["last_switch"] = datetime.now()
+
+            switch_context = config_context.copy()
+            switch_context.update(
+                {
+                    "switch_successful": True,
+                    "total_switches": self._stats["switches"],
+                }
+            )
+
+            self.logger.info("Configuration update and session switch completed", context=switch_context)
         else:
             # Clean up failed session
             with contextlib.suppress(Exception):
                 await new_session.close()
 
+            self.logger.error("Configuration update failed due to warmup failure", context=config_context)
+
     def update_client_kwargs(self, new_kwargs: dict[str, Any], merge: bool = True):
         """
         Update client configuration at runtime.
@@ -390,24 +1080,15 @@ class HttpClientCffi:
         Args:
             new_kwargs: New configuration options to apply
             merge: If True, merge with existing kwargs. If False, replace entirely.
+        """
+        config_context = {
+            "operation": "update_client_kwargs",
+            "merge_mode": merge,
+            "new_config_keys": list(new_kwargs.keys()),
+        }
 
-
-        # Update proxy
-        client.update_client_kwargs({"proxies": {"https": "http://new-proxy:8080"}})
-
-        # Change impersonation
-        client.update_client_kwargs({"impersonate": "safari184"})
-
-        # Add custom headers
-        client.update_client_kwargs({"headers": {"X-Custom": "value"}})
+        self.logger.debug("Updating client kwargs", context=config_context)
 
-        # Replace all kwargs
-        client.update_client_kwargs({
-            "impersonate": "firefox135",
-            "timeout": 30,
-            "verify": False
-        }, merge=False)
-        """
         with self._lock:
             if merge:
                 self.client_kwargs.update(new_kwargs)
@@ -430,18 +1111,68 @@ class HttpClientCffi:
 
     def get_stats(self) -> dict[str, Any]:
         """
-        Get statistics.
+        Get statistics including enhanced timing metrics.
+
+        Returns:
+            Statistics dictionary with switches, requests, timing data, etc.
+        """
+        with self._lock:
+            stats = self._stats.copy()
+            # Calculate additional metrics
+            if stats["total_requests"] > 0:
+                stats["success_rate"] = stats["successful_requests"] / stats["total_requests"]
+                stats["failure_rate"] = stats["failed_requests"] / stats["total_requests"]
+                if stats["retry_attempts"] > 0:
+                    stats["retry_success_rate"] = stats["retry_successes"] / stats["retry_attempts"]
+                else:
+                    stats["retry_success_rate"] = 0.0
+            else:
+                stats["success_rate"] = 0.0
+                stats["failure_rate"] = 0.0
+                stats["retry_success_rate"] = 0.0
+
+            return stats
+
+    def update_retry_config(self, retry_config: RetryConfig):
+        """
+        Update retry configuration at runtime.
+
+        Args:
+            retry_config: New retry configuration
+        """
+        config_context = {
+            "operation": "update_retry_config",
+            "max_retries": retry_config.max_retries,
+            "base_delay": retry_config.base_delay,
+        }
+
+        self.logger.debug("Updating retry configuration", context=config_context)
+
+        with self._lock:
+            self.retry_config = retry_config
+
+    def get_retry_config(self) -> RetryConfig:
+        """
+        Get current retry configuration.
 
         Returns:
-
+            Current retry configuration
         """
         with self._lock:
-            return self.
+            return self.retry_config
 
     async def close(self):
         """
         Close all sessions gracefully.
         """
+        close_context = {
+            "operation": "close_all_sessions",
+            "primary_state": self._primary_state.value if self._primary_state else "none",
+            "secondary_state": self._secondary_state.value if self._secondary_state else "none",
+        }
+
+        self.logger.info("Closing all HTTP sessions", context=close_context)
+
         tasks = []
 
         if self._primary_session:
@@ -457,4 +1188,15 @@ class HttpClientCffi:
             self._sync_secondary.close()
 
         if tasks:
-
+            try:
+                await asyncio.gather(*tasks, return_exceptions=True)
+                close_context.update({"async_close_successful": "true"})
+            except Exception as e:
+                close_context.update(
+                    {
+                        "async_close_successful": "false",
+                        "close_error": str(e),
+                    }
+                )
+
+        self.logger.info("HTTP sessions closed", context=close_context)
```