esuls 0.1.11__tar.gz → 0.1.13__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: esuls
-Version: 0.1.11
+Version: 0.1.13
 Summary: Utility library for async database operations, HTTP requests, and parallel execution
 Author-email: IperGiove <ipergiove@gmail.com>
 License: MIT
@@ -15,7 +15,7 @@ License-File: LICENSE
 Requires-Dist: aiosqlite>=0.21.0
 Requires-Dist: curl-cffi>=0.13.0
 Requires-Dist: fake-useragent>=2.2.0
-Requires-Dist: httpx>=0.28.1
+Requires-Dist: httpx[http2]>=0.28.1
 Requires-Dist: loguru>=0.7.3
 Requires-Dist: pillow>=12.0.0
 Requires-Dist: python-magic>=0.4.27
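The move from httpx to httpx[http2] is load-bearing for the code changes below, which construct clients with http2=True by default: the extra pulls in the optional h2 package, and httpx will not enable HTTP/2 without it. A quick sanity check, illustrative and not part of the package:

import httpx

# With the httpx[http2] extra installed this succeeds; without the 'h2'
# package it fails with an ImportError pointing at httpx[http2]
# (at client creation or first HTTP/2 use, depending on httpx version).
client = httpx.AsyncClient(http2=True)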
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "esuls"
-version = "0.1.11"
+version = "0.1.13"
 description = "Utility library for async database operations, HTTP requests, and parallel execution"
 readme = "README.md"
 requires-python = ">=3.14"
@@ -23,7 +23,7 @@ dependencies = [
     "aiosqlite>=0.21.0",
     "curl-cffi>=0.13.0",
     "fake-useragent>=2.2.0",
-    "httpx>=0.28.1",
+    "httpx[http2]>=0.28.1",
     "loguru>=0.7.3",
     "pillow>=12.0.0",
     "python-magic>=0.4.27",
@@ -57,20 +57,33 @@ class AsyncDB(Generic[SchemaType]):
         if not hasattr(AsyncDB, '_initialized_schemas'):
             AsyncDB._initialized_schemas = set()
 
-    async def _get_connection(self) -> aiosqlite.Connection:
-        """Create a new optimized connection."""
-        db = await aiosqlite.connect(self.db_path)
-        # Fast WAL mode with minimal sync
-        await db.execute("PRAGMA journal_mode=WAL")
-        await db.execute("PRAGMA synchronous=NORMAL")
-        await db.execute("PRAGMA cache_size=10000")
-
-        # Initialize schema if needed (check per unique schema)
-        if self._db_key not in AsyncDB._initialized_schemas:
-            await self._init_schema(db)
-            AsyncDB._initialized_schemas.add(self._db_key)
-
-        return db
+    async def _get_connection(self, max_retries: int = 5) -> aiosqlite.Connection:
+        """Create a new optimized connection with retry logic for concurrent access."""
+        last_error = None
+        for attempt in range(max_retries):
+            try:
+                db = await aiosqlite.connect(self.db_path, timeout=30.0)
+                # Fast WAL mode with minimal sync
+                await db.execute("PRAGMA journal_mode=WAL")
+                await db.execute("PRAGMA synchronous=NORMAL")
+                await db.execute("PRAGMA cache_size=10000")
+                await db.execute("PRAGMA busy_timeout=30000")  # 30s busy timeout
+
+                # Initialize schema if needed (check per unique schema)
+                if self._db_key not in AsyncDB._initialized_schemas:
+                    await self._init_schema(db)
+                    AsyncDB._initialized_schemas.add(self._db_key)
+
+                return db
+            except Exception as e:
+                last_error = e
+                if attempt < max_retries - 1:
+                    # Exponential backoff: 0.1s, 0.2s, 0.4s, 0.8s, 1.6s
+                    wait_time = 0.1 * (2 ** attempt)
+                    await asyncio.sleep(wait_time)
+                    continue
+                raise
+        raise last_error
 
     async def _init_schema(self, db: aiosqlite.Connection) -> None:
         """Generate schema from dataclass structure with support for field additions."""
@@ -1,24 +1,28 @@
 from dataclasses import dataclass
 from functools import lru_cache
-from typing import TypeAlias, Union, Optional, Dict, Any, TypeVar, AsyncContextManager, Literal
+from typing import TypeAlias, Union, Optional, Dict, Any, AsyncContextManager, Literal
+from urllib.parse import urlparse
 import asyncio
 import json
+import random
 import ssl
 from loguru import logger
 import httpx
 from fake_useragent import UserAgent
 from curl_cffi.requests import AsyncSession
 
-# Type definitions optimized
-ResponseT = TypeVar('ResponseT', bound='Response')
+# Type definitions
 JsonType: TypeAlias = Dict[str, Any]
 FileData: TypeAlias = tuple[str, Union[bytes, str], str]
 Headers: TypeAlias = Dict[str, str]
-HttpMethod: TypeAlias = Literal["GET", "POST",
-                                "PUT", "DELETE", "PATCH", "HEAD", "OPTIONS"]
+HttpMethod: TypeAlias = Literal["GET", "POST", "PUT", "DELETE", "PATCH", "HEAD", "OPTIONS"]
 
-# Global shared client with connection pooling to prevent "Too many open files" error
-_shared_client: Optional[httpx.AsyncClient] = None
+# Constants
+_FALLBACK_USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"
+_SUCCESS_STATUS_RANGE = range(200, 300)
+
+# Global connection pool per domain to prevent "Too many open files" error
+_domain_clients: Dict[str, httpx.AsyncClient] = {}
 _client_lock = asyncio.Lock()
 
 # Global cached UserAgent to prevent file descriptor exhaustion
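The new _SUCCESS_STATUS_RANGE constant is not just cosmetic: membership tests against a range of ints are O(1) in Python, so a status-code check against it reduces to two comparisons rather than a scan. Illustrative check:

_SUCCESS_STATUS_RANGE = range(200, 300)
print(204 in _SUCCESS_STATUS_RANGE)  # True
print(404 in _SUCCESS_STATUS_RANGE)  # False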
@@ -27,24 +31,21 @@ _user_agent_lock = asyncio.Lock()
 
 
 async def _get_user_agent() -> str:
-    """Get or create cached UserAgent instance to avoid file descriptor leaks"""
+    """Get or create cached UserAgent instance to avoid file descriptor leaks."""
     global _user_agent
     async with _user_agent_lock:
         if _user_agent is None:
             try:
                 _user_agent = UserAgent()
-            except Exception as e:
-                # Fallback to a static user agent if UserAgent() fails
-                logger.warning(
-                    f"Failed to initialize UserAgent, using fallback: {e}")
-                return "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"
+            except (OSError, IOError) as e:
+                logger.warning(f"Failed to initialize UserAgent, using fallback: {e}")
+                return _FALLBACK_USER_AGENT
 
     try:
         return _user_agent.random
-    except Exception as e:
-        logger.warning(
-            f"Failed to get random user agent, using fallback: {e}")
-        return "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"
+    except (AttributeError, IndexError) as e:
+        logger.warning(f"Failed to get random user agent, using fallback: {e}")
+        return _FALLBACK_USER_AGENT
 
 
 @lru_cache(maxsize=1)
@@ -58,22 +59,37 @@ def _create_optimized_ssl_context() -> ssl.SSLContext:
     return ctx
 
 
-async def _get_shared_client() -> httpx.AsyncClient:
-    """Get or create shared HTTP client with connection pooling"""
-    global _shared_client
+def _extract_domain(url: str) -> str:
+    """Extract domain from URL for connection pooling."""
+    parsed = urlparse(url)
+    return f"{parsed.scheme}://{parsed.netloc}"
+
+
+def _apply_jitter(delay: float, jitter: float) -> float:
+    """Add random jitter to delay to prevent thundering herd."""
+    if jitter <= 0:
+        return delay
+    return delay + random.uniform(0, delay * jitter)
+
+
+async def _get_domain_client(url: str, http2: bool = True) -> httpx.AsyncClient:
+    """Get or create HTTP client for a specific domain with connection pooling"""
+    domain = _extract_domain(url)
+    cache_key = f"{domain}:{'h2' if http2 else 'h1'}"
     async with _client_lock:
-        if _shared_client is None or _shared_client.is_closed:
-            _shared_client = httpx.AsyncClient(
+        if cache_key not in _domain_clients or _domain_clients[cache_key].is_closed:
+            _domain_clients[cache_key] = httpx.AsyncClient(
                 verify=_create_optimized_ssl_context(),
                 timeout=60,
                 follow_redirects=True,
+                http2=http2,
                 limits=httpx.Limits(
-                    max_connections=100,
-                    max_keepalive_connections=50,
+                    max_connections=20,
+                    max_keepalive_connections=10,
                     keepalive_expiry=30.0
                 )
             )
-    return _shared_client
+    return _domain_clients[cache_key]
 
 
 @dataclass(frozen=True)
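The pool above keys clients by scheme, host, and protocol generation, so HTTP/1.1 and HTTP/2 clients for the same host stay separate, while _apply_jitter spreads retry sleeps so concurrent workers do not hammer a host in lockstep. A minimal standalone sketch mirroring those helpers:

import random
from urllib.parse import urlparse

def extract_domain(url: str) -> str:  # mirrors _extract_domain above
    parsed = urlparse(url)
    return f"{parsed.scheme}://{parsed.netloc}"

# Two URLs on one host share a pooled client; the cache key carries the
# HTTP/2 flag, e.g. "https://example.com:h2" vs "https://example.com:h1".
print(extract_domain("https://example.com/a?page=1"))  # https://example.com

# With the default jitter of 0.1, a 10 s delay becomes a sleep in [10.0, 11.0)
delay, jitter = 10.0, 0.1
print(delay + random.uniform(0, delay * jitter))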
@@ -83,6 +99,7 @@ class Response:
     headers: Headers
     _content: bytes
     text: str
+    url: str = ""  # final URL after redirects
 
     @property
    def content(self) -> bytes:
@@ -93,22 +110,12 @@
 
 
 class AsyncRequest(AsyncContextManager['AsyncRequest']):
+    """Context manager for HTTP requests with automatic client lifecycle."""
+
     def __init__(self) -> None:
-        # self._logger = logging.getLogger(__name__)
-        self._ssl_context = self._create_optimized_ssl_context()
+        self._ssl_context = _create_optimized_ssl_context()
         self._client: Optional[httpx.AsyncClient] = None
 
-    @staticmethod
-    @lru_cache(maxsize=1)
-    def _create_optimized_ssl_context() -> ssl.SSLContext:
-        """Create an SSL context optimized for performance"""
-        ctx = ssl._create_default_https_context()
-        ctx.check_hostname = False
-        ctx.verify_mode = ssl.CERT_NONE
-        ctx.set_alpn_protocols(['http/1.1'])
-        ctx.post_handshake_auth = True
-        return ctx
-
     async def request(
         self,
         url: str,
@@ -171,11 +178,12 @@ class AsyncRequest(AsyncContextManager['AsyncRequest']):
                     status_code=httpx_response.status_code,
                     headers=dict(httpx_response.headers),
                     _content=httpx_response.content,
-                    text=httpx_response.text
+                    text=httpx_response.text,
+                    url=str(httpx_response.url),
                 )
 
                 # Handle unsuccessful status codes
-                if response.status_code not in range(200, 300):
+                if response.status_code not in _SUCCESS_STATUS_RANGE:
                     logger.warning(
                         f"Request: {response.status_code}\n"
                         f"Attempt {attempt + 1}/{max_attempt}\n"
@@ -185,9 +193,7 @@ class AsyncRequest(AsyncContextManager['AsyncRequest']):
                         f"Request data: {json_data}\n"
                     )
                     if skip_response:
-                        patterns = [skip_response] if isinstance(
-                            skip_response, str) else skip_response
-                        # Skip if patterns list is empty
+                        patterns = [skip_response] if isinstance(skip_response, str) else skip_response
                         if patterns and any(pattern in response.text for pattern in patterns if pattern):
                             return response if force_response else None
 
@@ -196,7 +202,7 @@ class AsyncRequest(AsyncContextManager['AsyncRequest']):
 
                     # Exponential backoff for 429 (rate limit)
                     if response.status_code == 429:
-                        backoff = min(120, exception_sleep * (2 ** attempt))
+                        backoff = min(120.0, exception_sleep * (2 ** attempt))
                         logger.info(f"Rate limited (429), backing off for {backoff:.1f}s")
                         await asyncio.sleep(backoff)
                     else:
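With the default exception_sleep of 10 seconds, this capped schedule waits 10, 20, 40, 80 seconds and then stays pinned at 120 from the fifth retry onward. Worked out (illustrative):

exception_sleep = 10.0
print([min(120.0, exception_sleep * (2 ** attempt)) for attempt in range(7)])
# [10.0, 20.0, 40.0, 80.0, 120.0, 120.0, 120.0]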
@@ -220,9 +226,8 @@ class AsyncRequest(AsyncContextManager['AsyncRequest']):
 
                 return response
 
-            except Exception as e:
-                logger.error(
-                    f"Request error: {e} - {url} - attempt {attempt + 1}/{max_attempt}")
+            except (httpx.HTTPError, OSError) as e:
+                logger.error(f"Request error: {e} - {url} - attempt {attempt + 1}/{max_attempt}")
                 if attempt + 1 == max_attempt:
                     return None
                 await asyncio.sleep(exception_sleep)
@@ -241,6 +246,33 @@ class AsyncRequest(AsyncContextManager['AsyncRequest']):
         self._client = None
 
 
+async def close_shared_client() -> None:
+    """Close all domain HTTP clients to release resources"""
+    global _domain_clients
+    async with _client_lock:
+        for domain, client in list(_domain_clients.items()):
+            if not client.is_closed:
+                await client.aclose()
+        _domain_clients.clear()
+
+
+async def close_domain_client(url: str, http2: Optional[bool] = None) -> None:
+    """Close HTTP client for a specific domain. If http2 is None, closes both h1 and h2 clients."""
+    domain = _extract_domain(url)
+    async with _client_lock:
+        keys_to_close = []
+        if http2 is None:
+            keys_to_close = [f"{domain}:h1", f"{domain}:h2"]
+        else:
+            keys_to_close = [f"{domain}:{'h2' if http2 else 'h1'}"]
+
+        for key in keys_to_close:
+            if key in _domain_clients:
+                if not _domain_clients[key].is_closed:
+                    await _domain_clients[key].aclose()
+                del _domain_clients[key]
+
+
 async def make_request(
     url: str,
     method: HttpMethod = "GET",
@@ -250,6 +282,7 @@ async def make_request(
     json_data: Optional[JsonType] = None,
     files: Optional[Dict[str, FileData]] = None,
     data: Optional[Union[str, bytes]] = None,
+    form_data: Optional[Dict[str, Any]] = None,
     proxy: Optional[str] = None,
     timeout_request: int = 60,
     max_attempt: int = 10,
@@ -259,102 +292,141 @@ async def make_request(
     skip_response: Optional[Union[str, list[str]]] = None,
     exception_sleep: float = 10,
     add_user_agent: bool = False,
+    follow_redirects: bool = True,
+    verify_ssl: bool = False,
+    no_retry_status_codes: Optional[list[int]] = None,
+    log_errors: bool = True,
+    http2: bool = True,
+    jitter: float = 0.1,
 ) -> Optional[Response]:
-    """Main function to execute HTTP requests using shared client for connection reuse"""
-    # Use shared client to avoid "Too many open files" error
-    client = await _get_shared_client()
+    """Execute HTTP requests using per-domain client for connection reuse."""
+    # Use dedicated client if proxy is specified, otherwise use per-domain pooled client
+    own_client = None
+    if proxy:
+        ssl_context = _create_optimized_ssl_context() if not verify_ssl else True
+        own_client = httpx.AsyncClient(
+            verify=ssl_context,
+            timeout=timeout_request,
+            follow_redirects=follow_redirects,
+            proxy=proxy,
+            http2=http2,
+            limits=httpx.Limits(
+                max_connections=20,
+                max_keepalive_connections=10,
+                keepalive_expiry=30.0
+            )
+        )
+        client = own_client
+    else:
+        client = await _get_domain_client(url, http2=http2)
 
     # Prepare headers
-    request_headers = dict(headers or {})
+    request_headers = headers.copy() if headers else {}
     if add_user_agent:
         request_headers["User-Agent"] = await _get_user_agent()
 
     # Prepare files for multipart/form-data
-    files_dict = None
-    if files:
-        files_dict = {}
-        for field_name, (filename, content, content_type) in files.items():
-            files_dict[field_name] = (filename, content, content_type)
+    files_dict = {
+        field_name: (filename, content, content_type)
+        for field_name, (filename, content, content_type) in files.items()
+    } if files else None
 
+    # Filter empty params
     if params:
         params = {k: v for k, v in params.items() if v}
 
-    for attempt in range(max_attempt):
-        try:
-            # Execute request with all necessary parameters
-            httpx_response = await client.request(
-                method=method,
-                url=url,
-                params=params,
-                json=json_data,
-                files=files_dict,
-                headers=request_headers,
-                timeout=timeout_request,
-                data=data,
-            )
+    # Determine data payload: form_data takes precedence over raw data
+    request_data = form_data if form_data else data
 
-            # Create custom Response object
-            response = Response(
-                status_code=httpx_response.status_code,
-                headers=dict(httpx_response.headers),
-                _content=httpx_response.content,
-                text=httpx_response.text
-            )
+    try:
+        for attempt in range(max_attempt):
+            try:
+                # Execute request with all necessary parameters
+                httpx_response = await client.request(
+                    method=method,
+                    url=url,
+                    params=params,
+                    json=json_data,
+                    files=files_dict,
+                    headers=request_headers,
+                    timeout=timeout_request,
+                    data=request_data,
+                    cookies=cookies,
+                    follow_redirects=follow_redirects,
+                )
 
-            # Handle unsuccessful status codes
-            if response.status_code not in range(200, 300):
-                logger.warning(
-                    f"Request: {response.status_code}\n"
-                    f"Attempt {attempt + 1}/{max_attempt}\n"
-                    f"Url: {url}\n"
-                    f"Params: {params}\n"
-                    f"Response: {response.text[:1000]}\n"
-                    f"Request data: {json_data}\n"
+                # Create custom Response object
+                response = Response(
+                    status_code=httpx_response.status_code,
+                    headers=dict(httpx_response.headers),
+                    _content=httpx_response.content,
+                    text=httpx_response.text,
+                    url=str(httpx_response.url),
                 )
-                if skip_response:
-                    patterns = [skip_response] if isinstance(
-                        skip_response, str) else skip_response
-                    if patterns and any(pattern in response.text for pattern in patterns if pattern):
+
+                # Handle unsuccessful status codes
+                if response.status_code not in _SUCCESS_STATUS_RANGE:
+                    if log_errors:
+                        logger.warning(
+                            f"Request: {response.status_code}\n"
+                            f"Attempt {attempt + 1}/{max_attempt}\n"
+                            f"Url: {url}\n"
+                            f"Params: {params}\n"
+                            f"Response: {response.text[:1000]}\n"
+                            f"Request data: {json_data}\n"
+                        )
+
+                    # Exit immediately for specific status codes (no retry)
+                    if no_retry_status_codes and response.status_code in no_retry_status_codes:
                         return response if force_response else None
 
-                if attempt + 1 == max_attempt:
-                    return response if force_response else None
-
-                # Exponential backoff for 429 (rate limit)
-                if response.status_code == 429:
-                    backoff = min(120, exception_sleep * (2 ** attempt))
-                    logger.info(f"Rate limited (429), backing off for {backoff:.1f}s")
-                    await asyncio.sleep(backoff)
-                else:
-                    await asyncio.sleep(exception_sleep)
-                continue
+                    if skip_response:
+                        patterns = [skip_response] if isinstance(skip_response, str) else skip_response
+                        if patterns and any(pattern in response.text for pattern in patterns if pattern):
+                            return response if force_response else None
+
+                    if attempt + 1 == max_attempt:
+                        return response if force_response else None
+
+                    # Exponential backoff for 429 (rate limit)
+                    if response.status_code == 429:
+                        backoff = min(120.0, exception_sleep * (2 ** attempt))
+                        if log_errors:
+                            logger.info(f"Rate limited (429), backing off for {backoff:.1f}s")
+                        await asyncio.sleep(_apply_jitter(backoff, jitter))
+                    else:
+                        await asyncio.sleep(_apply_jitter(exception_sleep, jitter))
+                    continue
 
-            # Validate JSON response
-            if json_response:
-                try:
-                    response_data = response.json()
-                    if json_response_check and json_response_check not in response_data:
+                # Validate JSON response
+                if json_response:
+                    try:
+                        response_data = response.json()
+                        if json_response_check and json_response_check not in response_data:
+                            if attempt + 1 == max_attempt:
+                                return None
+                            await asyncio.sleep(_apply_jitter(exception_sleep, jitter))
+                            continue
+                    except json.JSONDecodeError:
                         if attempt + 1 == max_attempt:
                             return None
-                        await asyncio.sleep(exception_sleep)
+                        await asyncio.sleep(_apply_jitter(exception_sleep, jitter))
                         continue
-                except json.JSONDecodeError:
-                    if attempt + 1 == max_attempt:
-                        return None
-                    await asyncio.sleep(exception_sleep)
-                    continue
 
-            return response
+                return response
 
-        except Exception as e:
-            logger.error(
-                f"Request error: {e} - {url} - attempt {attempt + 1}/{max_attempt}")
-            if attempt + 1 == max_attempt:
-                return None
-            await asyncio.sleep(exception_sleep)
-            continue
+            except (httpx.HTTPError, OSError) as e:
+                if log_errors:
+                    logger.error(f"Request error: {e} - {url} - attempt {attempt + 1}/{max_attempt}")
+                if attempt + 1 == max_attempt:
+                    return None
+                await asyncio.sleep(_apply_jitter(exception_sleep, jitter))
+                continue
 
-    return None
+        return None
+    finally:
+        if own_client:
+            await own_client.aclose()
 
 
 @lru_cache(maxsize=1)
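Taken together, the new keyword arguments give callers per-request control over retries, logging, and pooling. A hedged usage sketch; the top-level import path and the URL are assumptions, not taken from the package docs:

import asyncio
from esuls import make_request, close_shared_client  # import path assumed

async def main() -> None:
    response = await make_request(
        "https://api.example.com/items",  # placeholder URL
        method="GET",
        params={"page": 1},
        no_retry_status_codes=[404],  # fail fast instead of retrying a 404
        jitter=0.2,                   # stretch each retry sleep by up to 20%
        http2=False,                  # use the HTTP/1.1 client pool
    )
    if response is not None:
        print(response.status_code, response.url)
    # Pooled clients are module-level singletons; close them before exit.
    await close_shared_client()

asyncio.run(main())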
@@ -368,13 +440,12 @@ def _get_session_cffi() -> AsyncSession:
 
 
 async def make_request_cffi(url: str) -> Optional[str]:
-    """Optimized HTTP client with connection reuse and error handling."""
+    """HTTP client using curl_cffi for browser impersonation."""
     try:
         response = await _get_session_cffi().get(url)
-        print(response)
         response.raise_for_status()
         return response.text
-    except Exception:
+    except (OSError, IOError):
         return None
 
 
@@ -393,6 +464,6 @@ async def test_make_request_cffi():
     print(r)
 
 if __name__ == "__main__":
-    # asyncio.run(make_request("https://api.geckoterminal.com/api/v2/networks/zora-network/trending_pools?include=base_token%2C%20quote_token%2C%20dex&page=1", method="GET"))
+    print(asyncio.run(make_request("https://italiaonline.it", method="GET")))
     # asyncio.run(test_proxy())
-    asyncio.run(test_make_request_cffi())
+    # asyncio.run(test_make_request_cffi())
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: esuls
-Version: 0.1.11
+Version: 0.1.13
 Summary: Utility library for async database operations, HTTP requests, and parallel execution
 Author-email: IperGiove <ipergiove@gmail.com>
 License: MIT
@@ -15,7 +15,7 @@ License-File: LICENSE
 Requires-Dist: aiosqlite>=0.21.0
 Requires-Dist: curl-cffi>=0.13.0
 Requires-Dist: fake-useragent>=2.2.0
-Requires-Dist: httpx>=0.28.1
+Requires-Dist: httpx[http2]>=0.28.1
 Requires-Dist: loguru>=0.7.3
 Requires-Dist: pillow>=12.0.0
 Requires-Dist: python-magic>=0.4.27
@@ -1,7 +1,7 @@
 aiosqlite>=0.21.0
 curl-cffi>=0.13.0
 fake-useragent>=2.2.0
-httpx>=0.28.1
+httpx[http2]>=0.28.1
 loguru>=0.7.3
 pillow>=12.0.0
 python-magic>=0.4.27