dexscreen 0.0.1__py3-none-any.whl → 0.0.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,357 @@
1
+ """
2
+ Retry mechanism with exponential backoff for network operations
3
+ """
4
+
5
+ import asyncio
6
+ import logging
7
+ import random
8
+ import time
9
+ from dataclasses import dataclass, field
10
+ from functools import wraps
11
+ from typing import Any, Callable, Optional, TypeVar
12
+
13
+ import curl_cffi.requests.exceptions
14
+
15
+ # Type variables for generic function decorators
16
+ F = TypeVar("F", bound=Callable[..., Any])
17
+ AsyncF = TypeVar("AsyncF", bound=Callable[..., Any])
18
+
19
+ logger = logging.getLogger(__name__)
20
+
21
+
22
@dataclass
class RetryConfig:
    """Configuration for retry behavior.

    Controls how many times an operation is retried, how the exponential
    backoff delay grows between attempts, and which HTTP status codes and
    exception types are treated as transient (retryable).
    """

    max_retries: int = 3  # Retries after the initial attempt (total calls = max_retries + 1)
    base_delay: float = 1.0  # Base delay in seconds
    max_delay: float = 60.0  # Maximum delay in seconds
    backoff_factor: float = 2.0  # Exponential backoff multiplier
    jitter: bool = True  # Add random jitter to prevent thundering herd
    # HTTP status codes considered transient; responses with one of these
    # should trigger a retry.
    retryable_status_codes: set[int] = field(
        default_factory=lambda: {
            408,  # Request Timeout
            429,  # Too Many Requests
            500,  # Internal Server Error
            502,  # Bad Gateway
            503,  # Service Unavailable
            504,  # Gateway Timeout
            520,  # Web Server Returned an Unknown Error
            521,  # Web Server Is Down
            522,  # Connection Timed Out
            523,  # Origin Is Unreachable
            524,  # A Timeout Occurred
        }
    )
    # Exception types considered transient. NOTE(review): OSError is broad
    # (ConnectionError and TimeoutError are its subclasses) — presumably
    # intentional so all OS-level network errors are retried.
    retryable_exceptions: tuple[type[Exception], ...] = field(
        default_factory=lambda: (
            # Network-related exceptions
            curl_cffi.requests.exceptions.ConnectionError,
            curl_cffi.requests.exceptions.Timeout,
            curl_cffi.requests.exceptions.ReadTimeout,
            curl_cffi.requests.exceptions.ConnectTimeout,
            # OS-level network errors
            OSError,
            ConnectionError,
            TimeoutError,
        )
    )

    def __post_init__(self):
        """Validate configuration; raises ValueError for out-of-range fields."""
        if self.max_retries < 0:
            raise ValueError("max_retries must be non-negative")
        if self.base_delay <= 0:
            raise ValueError("base_delay must be positive")
        if self.max_delay <= 0:
            raise ValueError("max_delay must be positive")
        if self.backoff_factor <= 1:
            raise ValueError("backoff_factor must be greater than 1")
70
+
71
+
72
class RetryError(Exception):
    """Error raised once every allowed retry attempt has failed.

    Carries the exception from the final attempt plus the total number of
    attempts made, so callers can inspect why retrying gave up.
    """

    def __init__(self, message: str, original_exception: Exception, attempts: int):
        # Preserve standard Exception message semantics (str(err) == message).
        super().__init__(message)
        # Total attempts performed before giving up.
        self.attempts = attempts
        # The exception raised by the last (failed) attempt.
        self.original_exception = original_exception
79
+
80
+
81
def calculate_delay(attempt: int, config: "RetryConfig") -> float:
    """
    Calculate delay for exponential backoff with optional jitter.

    The delay grows as base_delay * backoff_factor ** attempt and is always
    capped at config.max_delay — including any added jitter. (Previously,
    jitter could push the delay up to 25% past the configured maximum.)

    Args:
        attempt: Current attempt number (0-based)
        config: Retry configuration

    Returns:
        Delay in seconds, never exceeding config.max_delay
    """
    # Exponential growth, capped at the configured ceiling.
    delay = min(config.base_delay * (config.backoff_factor**attempt), config.max_delay)

    if config.jitter:
        # Add up to 25% random jitter to avoid thundering-herd retries,
        # but keep the final delay within the documented max_delay bound.
        delay = min(delay + delay * 0.25 * random.random(), config.max_delay)

    return delay
102
+
103
+
104
def is_retryable(exception: Exception, config: RetryConfig) -> bool:
    """
    Determine if an exception is retryable based on configuration.

    Args:
        exception: The exception to check
        config: Retry configuration

    Returns:
        True if the exception should trigger a retry
    """
    # Retryable purely by exception type?
    if isinstance(exception, config.retryable_exceptions):
        return True

    # Retryable by an HTTP status code attached to the exception?
    response = getattr(exception, "response", None)
    if response is not None and hasattr(response, "status_code"):
        return response.status_code in config.retryable_status_codes

    # Fall back to curl_cffi's HTTPError, which may carry the response.
    try:
        http_error_cls = curl_cffi.requests.exceptions.HTTPError
    except AttributeError:
        # This curl_cffi build does not define HTTPError; nothing to check.
        return False

    if isinstance(exception, http_error_cls):
        resp = getattr(exception, "response", None)
        if resp is not None and hasattr(resp, "status_code"):
            return resp.status_code in config.retryable_status_codes

    return False
138
+
139
+
140
def retry_sync(config: Optional[RetryConfig] = None):
    """
    Decorator for synchronous functions with retry logic.

    Args:
        config: Retry configuration. If None, uses default RetryConfig.

    Returns:
        Decorated function

    The wrapped function raises RetryError when its final attempt fails;
    the error reports the number of attempts actually made (a non-retryable
    failure on the first call previously claimed max_retries + 1 attempts).
    """
    if config is None:
        config = RetryConfig()

    def decorator(func: F) -> F:
        @wraps(func)
        def wrapper(*args, **kwargs):
            last_exception = None
            attempts_made = 0  # Actual calls performed, for accurate reporting

            for attempt in range(config.max_retries + 1):
                attempts_made = attempt + 1
                try:
                    result = func(*args, **kwargs)

                    # Log successful retry if this wasn't the first attempt
                    if attempt > 0:
                        logger.info("Function %s succeeded after %d retries", func.__name__, attempt)

                    return result

                except Exception as e:
                    last_exception = e

                    # Retry only when attempts remain and the error is transient
                    if attempt < config.max_retries and is_retryable(e, config):
                        delay = calculate_delay(attempt, config)

                        logger.warning(
                            "Function %s failed (attempt %d/%d): %s. Retrying in %.2f seconds",
                            func.__name__,
                            attempt + 1,
                            config.max_retries + 1,
                            str(e),
                            delay,
                        )

                        time.sleep(delay)
                    else:
                        # Not retryable or max retries exceeded
                        break

            # All retries exhausted (or a non-retryable error occurred).
            # BUG FIX: report attempts actually made, not the configured maximum.
            error_msg = f"Function {func.__name__} failed after {attempts_made} attempts"
            if last_exception:
                logger.error("%s. Last error: %s", error_msg, str(last_exception))
                raise RetryError(error_msg, last_exception, attempts_made)
            else:
                # This should never happen, but handle it gracefully
                logger.error(error_msg)
                raise RuntimeError(error_msg)

        return wrapper  # type: ignore[return-value]

    return decorator
202
+
203
+
204
def retry_async(config: Optional[RetryConfig] = None):
    """
    Decorator for asynchronous functions with retry logic.

    Args:
        config: Retry configuration. If None, uses default RetryConfig.

    Returns:
        Decorated function

    The wrapped coroutine raises RetryError when its final attempt fails;
    the error reports the number of attempts actually made (a non-retryable
    failure on the first call previously claimed max_retries + 1 attempts).
    """
    if config is None:
        config = RetryConfig()

    def decorator(func: AsyncF) -> AsyncF:
        @wraps(func)
        async def wrapper(*args, **kwargs):
            last_exception = None
            attempts_made = 0  # Actual calls performed, for accurate reporting

            for attempt in range(config.max_retries + 1):
                attempts_made = attempt + 1
                try:
                    result = await func(*args, **kwargs)

                    # Log successful retry if this wasn't the first attempt
                    if attempt > 0:
                        logger.info("Function %s succeeded after %d retries", func.__name__, attempt)

                    return result

                except Exception as e:
                    last_exception = e

                    # Retry only when attempts remain and the error is transient
                    if attempt < config.max_retries and is_retryable(e, config):
                        delay = calculate_delay(attempt, config)

                        logger.warning(
                            "Function %s failed (attempt %d/%d): %s. Retrying in %.2f seconds",
                            func.__name__,
                            attempt + 1,
                            config.max_retries + 1,
                            str(e),
                            delay,
                        )

                        await asyncio.sleep(delay)
                    else:
                        # Not retryable or max retries exceeded
                        break

            # All retries exhausted (or a non-retryable error occurred).
            # BUG FIX: report attempts actually made, not the configured maximum.
            error_msg = f"Function {func.__name__} failed after {attempts_made} attempts"
            if last_exception:
                logger.error("%s. Last error: %s", error_msg, str(last_exception))
                raise RetryError(error_msg, last_exception, attempts_made)
            else:
                # This should never happen, but handle it gracefully
                logger.error(error_msg)
                raise RuntimeError(error_msg)

        return wrapper  # type: ignore[return-value]

    return decorator
266
+
267
+
268
class RetryManager:
    """
    Helper for driving retry loops by hand.

    Lets callers decide when to record failures, how long to sleep between
    attempts, and when to give up — unlike the decorators, which automate
    the whole loop.
    """

    def __init__(self, config: Optional[RetryConfig] = None):
        self.config = config or RetryConfig()
        self.attempt = 0
        self.last_exception: Optional[Exception] = None

    def should_retry(self, exception: Exception) -> bool:
        """Return True when another attempt is both allowed and sensible."""
        if self.attempt >= self.config.max_retries:
            return False
        return is_retryable(exception, self.config)

    def record_failure(self, exception: Exception):
        """Remember the failure and advance the attempt counter."""
        self.attempt += 1
        self.last_exception = exception

    def calculate_delay(self) -> float:
        """Return the backoff delay for the most recently recorded attempt."""
        # record_failure has already incremented attempt, so the 0-based
        # backoff exponent for that attempt is attempt - 1.
        return calculate_delay(self.attempt - 1, self.config)

    def _announce_retry(self, delay: float) -> None:
        """Log the upcoming retry with its delay and attempt counters."""
        logger.warning(
            "Retrying after %.2f seconds (attempt %d/%d): %s",
            delay,
            self.attempt,
            self.config.max_retries + 1,
            str(self.last_exception),
        )

    def wait_sync(self):
        """Block the current thread for the computed backoff delay."""
        if self.last_exception and self.should_retry(self.last_exception):
            delay = self.calculate_delay()
            self._announce_retry(delay)
            time.sleep(delay)

    async def wait_async(self):
        """Suspend the current task for the computed backoff delay."""
        if self.last_exception and self.should_retry(self.last_exception):
            delay = self.calculate_delay()
            self._announce_retry(delay)
            await asyncio.sleep(delay)

    def raise_if_exhausted(self, operation_name: str = "Operation"):
        """Raise RetryError once the retry budget has been spent."""
        if self.attempt > self.config.max_retries and self.last_exception:
            error_msg = f"{operation_name} failed after {self.attempt} attempts"
            logger.error("%s. Last error: %s", error_msg, str(self.last_exception))
            raise RetryError(error_msg, self.last_exception, self.attempt)
326
+
327
+
328
# Predefined retry configurations for common scenarios
class RetryPresets:
    """Ready-made RetryConfig instances for typical operational profiles."""

    @staticmethod
    def network_operations() -> RetryConfig:
        """Conservative settings suited to general network I/O."""
        return RetryConfig(
            max_retries=3,
            base_delay=1.0,
            max_delay=30.0,
            backoff_factor=2.0,
            jitter=True,
        )

    @staticmethod
    def api_calls() -> RetryConfig:
        """Moderate settings with a gentler backoff curve for API calls."""
        return RetryConfig(
            max_retries=5,
            base_delay=0.5,
            max_delay=60.0,
            backoff_factor=1.5,
            jitter=True,
        )

    @staticmethod
    def aggressive() -> RetryConfig:
        """Many fast retries for critical operations that must succeed."""
        return RetryConfig(
            max_retries=10,
            base_delay=0.1,
            max_delay=120.0,
            backoff_factor=1.8,
            jitter=True,
        )

    @staticmethod
    def rate_limit_heavy() -> RetryConfig:
        """Long, patient backoff tuned for heavily rate-limited APIs."""
        return RetryConfig(
            max_retries=8,
            base_delay=2.0,
            max_delay=300.0,  # 5 minutes max
            backoff_factor=2.5,
            jitter=True,
            retryable_status_codes={429, 500, 502, 503, 504},  # Focus on rate limits and server errors
        )
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: dexscreen
3
- Version: 0.0.1
3
+ Version: 0.0.4
4
4
  Summary: Python wrapper for Dexscreener API with stable HTTP support
5
5
  Project-URL: Repository, https://github.com/solanab/dexscreen
6
6
  Project-URL: Documentation, https://github.com/solanab/dexscreen#readme
@@ -68,8 +68,12 @@ pip install dexscreen
68
68
  ```python
69
69
  from dexscreen import DexscreenerClient
70
70
 
71
+ # Default client with 10-second timeout
71
72
  client = DexscreenerClient()
72
73
 
74
+ # Custom timeout client
75
+ client = DexscreenerClient(client_kwargs={"timeout": 30})
76
+
73
77
  # Get a specific pair by token address
74
78
  pairs = client.get_pairs_by_token_address("solana", "JUPyiwrYJFskUPiHa7hkeR8VUtAeFoSYbKedZNsDvCN")
75
79
  if pairs:
@@ -259,6 +263,47 @@ The SDK automatically handles rate limiting:
259
263
  - 60 requests/minute for token profile endpoints
260
264
  - 300 requests/minute for pair data endpoints
261
265
 
266
+ ## Timeout Configuration
267
+
268
+ The SDK provides flexible timeout configuration for different use cases:
269
+
270
+ ### Default Timeout
271
+ ```python
272
+ # Default timeout is 10 seconds
273
+ client = DexscreenerClient()
274
+ ```
275
+
276
+ ### Custom Timeout
277
+ ```python
278
+ # Set custom timeout during initialization
279
+ client = DexscreenerClient(client_kwargs={"timeout": 30})
280
+
281
+ # Different timeouts for different scenarios
282
+ fast_client = DexscreenerClient(client_kwargs={"timeout": 5}) # Quick responses
283
+ stable_client = DexscreenerClient(client_kwargs={"timeout": 60}) # Stable connections
284
+ ```
285
+
286
+ ### Runtime Timeout Updates
287
+ ```python
288
+ # Update timeout at runtime
289
+ await client._client_300rpm.update_config({"timeout": 15})
290
+
291
+ # Multiple config updates including timeout
292
+ await client._client_300rpm.update_config({
293
+ "timeout": 25,
294
+ "impersonate": "chrome136"
295
+ })
296
+ ```
297
+
298
+ ### Recommended Timeout Values
299
+
300
+ | Use Case | Timeout (seconds) | Description |
301
+ |----------|------------------|-------------|
302
+ | Quick Trading | 5-10 | Fast response for time-sensitive operations |
303
+ | General Use | 10-15 | Default balanced setting |
304
+ | Stable Monitoring | 20-30 | Reliable for long-running subscriptions |
305
+ | Poor Networks | 30-60 | Handle unstable connections |
306
+
262
307
  ## Browser Impersonation
263
308
 
264
309
  The SDK uses curl_cffi for browser impersonation to bypass anti-bot protection:
@@ -267,6 +312,12 @@ The SDK uses curl_cffi for browser impersonation to bypass anti-bot protection:
267
312
  # Use different browser versions
268
313
  client = DexscreenerClient(impersonate="chrome134")
269
314
  client = DexscreenerClient(impersonate="safari180")
315
+
316
+ # Combine browser impersonation with custom timeout
317
+ client = DexscreenerClient(
318
+ impersonate="chrome136",
319
+ client_kwargs={"timeout": 20}
320
+ )
270
321
  ```
271
322
 
272
323
  ## Contributing
@@ -0,0 +1,22 @@
1
+ dexscreen/__init__.py,sha256=TZfBw2mQ_oy5c7kJ82GP5tu5fSm_vCZJGBOIRULGn6E,2639
2
+ dexscreen/api/__init__.py,sha256=_fFBxC2rrc4nzeRFS_0MQM3cNFrz-ZtvMKrKXX9gtyI,71
3
+ dexscreen/api/client.py,sha256=ic_EcFpnr4VfL3chHIpaovvBhf2qYRlLwTT21ssFkr8,37847
4
+ dexscreen/config/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
5
+ dexscreen/core/__init__.py,sha256=_QvgIu4e9RG06TrCTC-BIJMJ5duEFwp1LLTZ7sG4YuM,490
6
+ dexscreen/core/exceptions.py,sha256=IKL-_yWJ3fjmdj09QxOLHTF0zHeinkL0IY7bL51VpmQ,34339
7
+ dexscreen/core/http.py,sha256=T8BFQoQBsiOnVtHb02uZaQ3N8MwqE3C_d-xSVPbHQ34,49096
8
+ dexscreen/core/models.py,sha256=7-EN63yyQK92T1ZRhfHwwXJ8Cg1VbO3CY-wdlpljIu4,2573
9
+ dexscreen/core/validators.py,sha256=BFoey2XangE_i7bxLtuBlRVL-PbUh4ZIrGPq0seRvVI,14536
10
+ dexscreen/stream/__init__.py,sha256=vqoiFQ4LwLjaIEWl7p60StTfG1bSJhyEapjWILVP168,100
11
+ dexscreen/stream/polling.py,sha256=nH12bFvB81HK_efQRcWqXuBlZgKqQCwnLCP5Z1EY7Rk,26462
12
+ dexscreen/utils/__init__.py,sha256=H_70Na9bPmT-5kYgmFbG7_n2MRTJ4PiPIkVyZjhTy7U,1282
13
+ dexscreen/utils/browser_selector.py,sha256=OB14lfSO6zyen3Qga2Cw2Xc9u9_Y5OkEuIdgLeG2_TA,1186
14
+ dexscreen/utils/filters.py,sha256=OnFH_9ZjzkqMZ2804E3V3etSPzTn7FCU5_rJoKOTwC4,14872
15
+ dexscreen/utils/logging_config.py,sha256=M5CJNx8rqZsYTAQF-yT1wVQgj7TvEmR87wJkDCm8_sk,14170
16
+ dexscreen/utils/middleware.py,sha256=7oq628-W9bgoIoAx4W48V-dl3G0WVPThVgwhCCN8q58,12758
17
+ dexscreen/utils/ratelimit.py,sha256=u6VqMVV5mV5XLJpQSPvDFo618Y29rPt76PN0aArnIDo,10203
18
+ dexscreen/utils/retry.py,sha256=-nwhU3GwJULLxCoD4er_wh1EWxh96tf4uDzEdFsrEmQ,12536
19
+ dexscreen-0.0.4.dist-info/METADATA,sha256=uBPvq-c4Mr8IHbkwwUOz51r-lu-198Xt5FHpXWv16eQ,9345
20
+ dexscreen-0.0.4.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
21
+ dexscreen-0.0.4.dist-info/licenses/LICENSE,sha256=YLNduNj40Iu4upUYI6XEXrgBlv0hxMNf6uEVAJITYN0,1064
22
+ dexscreen-0.0.4.dist-info/RECORD,,
@@ -1,17 +0,0 @@
1
- dexscreen/__init__.py,sha256=W7jJhDNMyzsLsOfQefZJ4pEphfmoIYQbj7rViLplgVg,663
2
- dexscreen/api/__init__.py,sha256=_fFBxC2rrc4nzeRFS_0MQM3cNFrz-ZtvMKrKXX9gtyI,71
3
- dexscreen/api/client.py,sha256=_5iQjzx7X4nLhkFimLOPI6dVOcXopgZzya1UNJJns4I,28502
4
- dexscreen/config/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
5
- dexscreen/core/__init__.py,sha256=_QvgIu4e9RG06TrCTC-BIJMJ5duEFwp1LLTZ7sG4YuM,490
6
- dexscreen/core/http.py,sha256=pimTHfsV1v2txnlPk8DIaKesxpZi6cFATm9Z8wlS-No,16327
7
- dexscreen/core/models.py,sha256=7-EN63yyQK92T1ZRhfHwwXJ8Cg1VbO3CY-wdlpljIu4,2573
8
- dexscreen/stream/__init__.py,sha256=vqoiFQ4LwLjaIEWl7p60StTfG1bSJhyEapjWILVP168,100
9
- dexscreen/stream/polling.py,sha256=yltLwKHQJkN2RJAkgJFxzKvRwcv9M3DYHl19D_ZYSog,17517
10
- dexscreen/utils/__init__.py,sha256=2IXzfN8UFzC4iMX3IoK_9rUY-0CtCCsdaFa66Bdv2zw,180
11
- dexscreen/utils/browser_selector.py,sha256=OB14lfSO6zyen3Qga2Cw2Xc9u9_Y5OkEuIdgLeG2_TA,1186
12
- dexscreen/utils/filters.py,sha256=rAgcMOLi3VgTvi4ga6p2dBQ-hWlU3DURpCrabyV-V2g,8145
13
- dexscreen/utils/ratelimit.py,sha256=NncESbX_8RHPSA9vpxH-vnFwoPwssvooOZthqEQ0-G0,1570
14
- dexscreen-0.0.1.dist-info/METADATA,sha256=imRHIwcAMcMZG_67e8ccYt0z0ueNPWvLgUpm4gqOyE4,7881
15
- dexscreen-0.0.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
16
- dexscreen-0.0.1.dist-info/licenses/LICENSE,sha256=YLNduNj40Iu4upUYI6XEXrgBlv0hxMNf6uEVAJITYN0,1064
17
- dexscreen-0.0.1.dist-info/RECORD,,