esuls-0.1.0-py3-none-any.whl

esuls/__init__.py ADDED
@@ -0,0 +1,21 @@
+ """
+ esuls - Utility library for async database operations, HTTP requests, and parallel execution
+ """
+
+ __version__ = "0.1.0"
+
+ # Import all utilities
+ from .utils import run_parallel
+ from .db_cli import AsyncDB, BaseModel
+ from .request_cli import AsyncRequest, make_request, make_request_cffi, Response
+
+ __all__ = [
+     '__version__',
+     'run_parallel',
+     'AsyncDB',
+     'BaseModel',
+     'AsyncRequest',
+     'make_request',
+     'make_request_cffi',
+     'Response',
+ ]
esuls/db_cli.py ADDED
@@ -0,0 +1,439 @@
+ import asyncio
+ import aiosqlite
+ import json
+ from datetime import datetime
+ from pathlib import Path
+ from typing import Any, Dict, List, Optional, TypeVar, Generic, Type, get_type_hints, Union, Tuple
+ from dataclasses import dataclass, asdict, fields, is_dataclass, field, MISSING
+ from functools import lru_cache
+ import uuid
+ import contextlib
+ import enum
+
+ T = TypeVar('T')
+ SchemaType = TypeVar('SchemaType', bound='BaseModel')
+
+ @dataclass
+ class BaseModel:
+     id: str = field(default_factory=lambda: str(uuid.uuid4()), metadata={"primary_key": True})
+     created_at: datetime = field(default_factory=datetime.now)
+     updated_at: datetime = field(default_factory=datetime.now)
+
+
+ class AsyncDB(Generic[SchemaType]):
+     """High-performance async SQLite with dataclass schema and reliable connection handling."""
+
+     OPERATOR_MAP = {
+         'gt': '>', 'lt': '<', 'gte': '>=', 'lte': '<=',
+         'neq': '!=', 'like': 'LIKE', 'in': 'IN', 'eq': '='
+     }
+
+     def __init__(self, db_path: Union[str, Path], table_name: str, schema_class: Type[SchemaType]):
+         """Initialize AsyncDB with a path and schema dataclass."""
+         if not is_dataclass(schema_class):
+             raise TypeError(f"Schema must be a dataclass, got {schema_class}")
+
+         self.db_path = Path(db_path).resolve()
+         self.schema_class = schema_class
+         self.table_name = table_name
+         self.db_path.parent.mkdir(parents=True, exist_ok=True)
+
+         # Make schema initialization unique per instance
+         self._db_key = f"{str(self.db_path)}:{self.table_name}:{self.schema_class.__name__}"
+         self._write_lock = asyncio.Lock()
+         self._type_hints = get_type_hints(schema_class)
+
+         # Use a class-level set to track initialized schemas
+         if not hasattr(AsyncDB, '_initialized_schemas'):
+             AsyncDB._initialized_schemas = set()
+
+     async def _get_connection(self) -> aiosqlite.Connection:
+         """Create a new optimized connection."""
+         db = await aiosqlite.connect(self.db_path)
+         # Fast WAL mode with minimal sync
+         await db.execute("PRAGMA journal_mode=WAL")
+         await db.execute("PRAGMA synchronous=NORMAL")
+         await db.execute("PRAGMA cache_size=10000")
+
+         # Initialize the schema if needed (checked per unique schema)
+         if self._db_key not in AsyncDB._initialized_schemas:
+             await self._init_schema(db)
+             AsyncDB._initialized_schemas.add(self._db_key)
+
+         return db
+
+     async def _init_schema(self, db: aiosqlite.Connection) -> None:
+         """Generate schema from the dataclass structure, with support for field additions."""
+         print(f"Initializing schema for {self.schema_class.__name__} in table {self.table_name}")
+
+         field_defs = []
+         indexes = []
+
+         # First check whether the table exists
+         cursor = await db.execute(
+             "SELECT name FROM sqlite_master WHERE type='table' AND name=?",
+             (self.table_name,)
+         )
+         table_exists = await cursor.fetchone() is not None
+
+         existing_columns = set()
+         if table_exists:
+             # Get existing columns if the table exists
+             cursor = await db.execute(f"PRAGMA table_info({self.table_name})")
+             columns = await cursor.fetchall()
+             existing_columns = {col[1] for col in columns}  # col[1] is the column name
+
+         # Process all fields in the dataclass - ONLY THIS SCHEMA CLASS
+         schema_fields = fields(self.schema_class)
+         print(f"Processing {len(schema_fields)} fields for {self.schema_class.__name__}")
+
+         for f in schema_fields:
+             field_name = f.name
+             field_type = self._type_hints.get(field_name)
+             print(f"  Field: {field_name} -> {field_type}")
+
+             # Map Python types to SQLite types
+             if field_type in (int, bool):
+                 sql_type = "INTEGER"
+             elif field_type in (float,):
+                 sql_type = "REAL"
+             elif field_type is str or (isinstance(field_type, type) and issubclass(field_type, enum.Enum)):
+                 sql_type = "TEXT"
+             elif field_type in (datetime,):
+                 sql_type = "TIMESTAMP"
+             elif field_type == List[str]:
+                 sql_type = "TEXT"  # Stored as JSON
+             else:
+                 sql_type = "TEXT"  # Default to TEXT/JSON for complex types
+
+             # Handle special field metadata
+             constraints = []
+             if f.metadata.get('primary_key'):
+                 constraints.append("PRIMARY KEY")
+             if f.metadata.get('unique'):
+                 constraints.append("UNIQUE")
+             # Dataclasses signal "no default" with the MISSING sentinel, not a falsy value
+             if f.default is MISSING and f.default_factory is MISSING and f.metadata.get('required', True):
+                 constraints.append("NOT NULL")
+
+             field_def = f"{field_name} {sql_type} {' '.join(constraints)}"
+
+             if not table_exists:
+                 # Add the field definition for new table creation
+                 field_defs.append(field_def)
+             elif field_name not in existing_columns:
+                 # Alter the table to add the new column without a NOT NULL constraint
+                 alter_sql = f"ALTER TABLE {self.table_name} ADD COLUMN {field_name} {sql_type}"
+                 print(f"  Adding new column: {alter_sql}")
+                 await db.execute(alter_sql)
+                 await db.commit()
+
+             # Handle indexes
+             if f.metadata.get('index'):
+                 index_name = f"idx_{self.table_name}_{field_name}"
+                 index_sql = f"CREATE INDEX IF NOT EXISTS {index_name} ON {self.table_name}({field_name})"
+                 indexes.append(index_sql)
+
+         # Create the table if it doesn't exist
+         if not table_exists:
+             # Check for table constraints
+             table_constraints = getattr(self.schema_class, '__table_constraints__', [])
+
+             constraints_sql = ""
+             if table_constraints:
+                 constraints_sql = ", " + ", ".join(table_constraints)
+
+             create_sql = f"""
+                 CREATE TABLE IF NOT EXISTS {self.table_name} (
+                     {', '.join(field_defs)}{constraints_sql}
+                 )
+             """
+             print(f"Creating table: {create_sql}")
+             await db.execute(create_sql)
+
+         # Create indexes
+         for idx_stmt in indexes:
+             await db.execute(idx_stmt)
+
+         await db.commit()
+         print(f"Schema initialization complete for {self.schema_class.__name__}")
+
+     @contextlib.asynccontextmanager
+     async def transaction(self):
+         """Run operations in a transaction with reliable cleanup."""
+         db = await self._get_connection()
+         try:
+             yield db
+             await db.commit()
+         except Exception:
+             await db.rollback()
+             raise
+         finally:
+             await db.close()
+
+     # @lru_cache(maxsize=128)
+     def _serialize_value(self, value: Any) -> Any:
+         """Fast value serialization with type-based optimization."""
+         if value is None or isinstance(value, (int, float, bool, str)):
+             return value
+         if isinstance(value, datetime):
+             return value.isoformat()
+         if isinstance(value, enum.Enum):
+             return value.value
+         if isinstance(value, (list, dict, tuple)):
+             return json.dumps(value)
+         return str(value)
+
+     def _deserialize_value(self, field_name: str, value: Any) -> Any:
+         """Deserialize values based on the field type."""
+         if value is None:
+             return value
+
+         field_type = self._type_hints.get(field_name)
+
+         # Handle string fields - ensure phone numbers are strings
+         if field_type is str or (hasattr(field_type, '__origin__') and field_type.__origin__ is Union and str in getattr(field_type, '__args__', ())):
+             return str(value)
+
+         if field_type is datetime and isinstance(value, str):
+             return datetime.fromisoformat(value)
+
+         # Handle enum types
+         if hasattr(field_type, '__origin__') and field_type.__origin__ is Union:
+             # Handle the Optional[EnumType] case
+             args = getattr(field_type, '__args__', ())
+             for arg in args:
+                 if arg is not type(None) and isinstance(arg, type) and issubclass(arg, enum.Enum):
+                     try:
+                         return arg(value)
+                     except (ValueError, TypeError):
+                         pass
+         elif isinstance(field_type, type) and issubclass(field_type, enum.Enum):
+             # Handle direct enum types
+             try:
+                 return field_type(value)
+             except (ValueError, TypeError):
+                 pass
+
+         if isinstance(value, str):
+             try:
+                 return json.loads(value)
+             except (json.JSONDecodeError, TypeError):
+                 pass
+
+         return value
+
+     @lru_cache(maxsize=64)
+     def _generate_save_sql(self, field_names: Tuple[str, ...]) -> str:
+         """Generate efficient SQL for upsert with proper conflict handling."""
+         columns = ','.join(field_names)
+         placeholders = ','.join('?' for _ in field_names)
+
+         return f"""
+             INSERT OR REPLACE INTO {self.table_name} ({columns},id)
+             VALUES ({placeholders},?)
+         """
+
+     async def save_batch(self, items: List[SchemaType], skip_errors: bool = True) -> int:
+         """Save multiple items in a single transaction for better performance.
+
+         Args:
+             items: List of schema objects to save
+             skip_errors: If True, skip items that cause errors
+
+         Returns:
+             Number of items successfully saved
+         """
+         if not items:
+             return 0
+
+         saved_count = 0
+
+         async with self._write_lock:
+             async with self.transaction() as db:
+                 for item in items:
+                     try:
+                         if not isinstance(item, self.schema_class):
+                             if not skip_errors:
+                                 raise TypeError(f"Expected {self.schema_class.__name__}, got {type(item).__name__}")
+                             continue
+
+                         # Extract and process data
+                         data = asdict(item)
+                         item_id = data.pop('id', None) or str(uuid.uuid4())
+
+                         # Ensure created_at and updated_at are set
+                         now = datetime.now()
+                         if not data.get('created_at'):
+                             data['created_at'] = now
+                         data['updated_at'] = now
+
+                         # Prepare SQL and values
+                         field_names = tuple(sorted(data.keys()))
+                         sql = self._generate_save_sql(field_names)
+                         values = [self._serialize_value(data[name]) for name in field_names]
+                         values.append(item_id)
+
+                         # Execute the save
+                         await db.execute(sql, values)
+                         saved_count += 1
+
+                     except Exception as e:
+                         if skip_errors:
+                             # Optionally log the error for debugging
+                             # print(f"Save error (skipped): {e}")
+                             continue
+                         raise
+
+         return saved_count
+
+     async def save(self, item: SchemaType, skip_errors: bool = True) -> bool:
+         """Store a schema object with upsert functionality and error handling.
+
+         Args:
+             item: The schema object to save
+             skip_errors: If True, silently skip errors and return False. If False, raise errors.
+
+         Returns:
+             True if the save was successful, False if an error occurred and skip_errors=True
+         """
+         try:
+             if not isinstance(item, self.schema_class):
+                 if skip_errors:
+                     return False
+                 raise TypeError(f"Expected {self.schema_class.__name__}, got {type(item).__name__}")
+
+             # Extract and process data
+             data = asdict(item)
+             item_id = data.pop('id', None) or str(uuid.uuid4())
+
+             # Ensure created_at and updated_at are set
+             now = datetime.now()
+             if not data.get('created_at'):
+                 data['created_at'] = now
+             data['updated_at'] = now
+
+             # Prepare SQL and values
+             field_names = tuple(sorted(data.keys()))
+             sql = self._generate_save_sql(field_names)
+             values = [self._serialize_value(data[name]) for name in field_names]
+             values.append(item_id)
+
+             # Perform the save in a reliable transaction
+             async with self._write_lock:
+                 async with self.transaction() as db:
+                     await db.execute(sql, values)
+
+             return True
+
+         except Exception as e:
+             if skip_errors:
+                 # Optionally log the error for debugging
+                 # print(f"Save error (skipped): {e}")
+                 return False
+             raise
+
+     async def get_by_id(self, id: str) -> Optional[SchemaType]:
+         """Fetch an item by ID with reliable connection handling."""
+         async with self.transaction() as db:
+             cursor = await db.execute(f"SELECT * FROM {self.table_name} WHERE id = ?", (id,))
+             row = await cursor.fetchone()
+
+             if not row:
+                 return None
+
+             # Get column names and build the data dictionary
+             columns = [desc[0] for desc in cursor.description]
+             return self.schema_class(**{
+                 col: self._deserialize_value(col, row[i])
+                 for i, col in enumerate(columns)
+             })
+
+     def _build_where_clause(self, filters: Dict[str, Any]) -> Tuple[str, List[Any]]:
+         """Build an optimized WHERE clause for queries."""
+         if not filters:
+             return "", []
+
+         conditions = []
+         values = []
+
+         for key, value in filters.items():
+             # Handle special values
+             if value == 'now':
+                 value = datetime.now()
+
+             # Parse field and operator
+             parts = key.split('__', 1)
+             field = parts[0]
+
+             if len(parts) > 1 and parts[1] in self.OPERATOR_MAP:
+                 op_str = self.OPERATOR_MAP[parts[1]]
+
+                 # Handle the IN operator specially
+                 if op_str == 'IN' and isinstance(value, (list, tuple)):
+                     placeholders = ','.join(['?'] * len(value))
+                     conditions.append(f"{field} IN ({placeholders})")
+                     values.extend(value)
+                 else:
+                     conditions.append(f"{field} {op_str} ?")
+                     values.append(value)
+             else:
+                 # Default to equality
+                 conditions.append(f"{field} = ?")
+                 values.append(value)
+
+         return f"WHERE {' AND '.join(conditions)}", values
+
+     async def find(self, order_by=None, **filters) -> List[SchemaType]:
+         """Query items with reliable connection handling."""
+         where_clause, values = self._build_where_clause(filters)
+
+         # Build the query
+         query = f"SELECT * FROM {self.table_name} {where_clause}"
+
+         # Add an ORDER BY clause if specified
+         if order_by:
+             order_fields = [order_by] if isinstance(order_by, str) else order_by
+             order_clauses = [
+                 f"{field[1:]} DESC" if field.startswith('-') else f"{field} ASC"
+                 for field in order_fields
+             ]
+             query += f" ORDER BY {', '.join(order_clauses)}"
+
+         # Execute the query in a reliable transaction
+         async with self.transaction() as db:
+             cursor = await db.execute(query, values)
+             rows = await cursor.fetchall()
+
+             if not rows:
+                 return []
+
+             # Process results
+             columns = [desc[0] for desc in cursor.description]
+             return [
+                 self.schema_class(**{
+                     col: self._deserialize_value(col, row[i])
+                     for i, col in enumerate(columns)
+                 })
+                 for row in rows
+             ]
+
+     async def count(self, **filters) -> int:
+         """Count items matching filters with reliable connection handling."""
+         where_clause, values = self._build_where_clause(filters)
+         query = f"SELECT COUNT(*) FROM {self.table_name} {where_clause}"
+
+         async with self.transaction() as db:
+             cursor = await db.execute(query, values)
+             result = await cursor.fetchone()
+             return result[0] if result else 0
+
+     async def fetch_all(self) -> List[SchemaType]:
+         """Retrieve all items."""
+         return await self.find()
+
+     async def delete(self, id: str) -> bool:
+         """Delete an item by ID with reliable transaction handling."""
+         async with self._write_lock:
+             async with self.transaction() as db:
+                 cursor = await db.execute(f"DELETE FROM {self.table_name} WHERE id = ?", (id,))
+                 return cursor.rowcount > 0
esuls/request_cli.py ADDED
@@ -0,0 +1,384 @@
+ from dataclasses import dataclass
+ from functools import lru_cache
+ from typing import TypeAlias, Union, Optional, Dict, Any, TypeVar, AsyncContextManager, Literal
+ import asyncio
+ import json
+ import ssl
+ from loguru import logger
+ import httpx
+ from fake_useragent import UserAgent
+ from curl_cffi.requests import AsyncSession
+
+ # Type definitions
+ ResponseT = TypeVar('ResponseT', bound='Response')
+ JsonType: TypeAlias = Dict[str, Any]
+ FileData: TypeAlias = tuple[str, Union[bytes, str], str]
+ Headers: TypeAlias = Dict[str, str]
+ HttpMethod: TypeAlias = Literal["GET", "POST",
+                                 "PUT", "DELETE", "PATCH", "HEAD", "OPTIONS"]
+
+ # Global shared client with connection pooling to prevent "too many open files" errors
+ _shared_client: Optional[httpx.AsyncClient] = None
+ _client_lock = asyncio.Lock()
+
+ # Globally cached UserAgent to prevent file descriptor exhaustion
+ _user_agent: Optional[UserAgent] = None
+ _user_agent_lock = asyncio.Lock()
+
+
+ async def _get_user_agent() -> str:
+     """Get or create the cached UserAgent instance to avoid file descriptor leaks"""
+     global _user_agent
+     async with _user_agent_lock:
+         if _user_agent is None:
+             try:
+                 _user_agent = UserAgent()
+             except Exception as e:
+                 # Fall back to a static user agent if UserAgent() fails
+                 logger.warning(
+                     f"Failed to initialize UserAgent, using fallback: {e}")
+                 return "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"
+
+     try:
+         return _user_agent.random
+     except Exception as e:
+         logger.warning(
+             f"Failed to get random user agent, using fallback: {e}")
+         return "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"
+
+
+ @lru_cache(maxsize=1)
+ def _create_optimized_ssl_context() -> ssl.SSLContext:
+     """Create an SSL context optimized for performance"""
+     ctx = ssl.create_default_context()
+     ctx.check_hostname = False
+     ctx.verify_mode = ssl.CERT_NONE
+     ctx.set_alpn_protocols(['http/1.1'])
+     ctx.post_handshake_auth = True
+     return ctx
+
+
+ async def _get_shared_client() -> httpx.AsyncClient:
+     """Get or create the shared HTTP client with connection pooling"""
+     global _shared_client
+     async with _client_lock:
+         if _shared_client is None or _shared_client.is_closed:
+             _shared_client = httpx.AsyncClient(
+                 verify=_create_optimized_ssl_context(),
+                 timeout=60,
+                 follow_redirects=True,
+                 limits=httpx.Limits(
+                     max_connections=100,
+                     max_keepalive_connections=50,
+                     keepalive_expiry=30.0
+                 )
+             )
+         return _shared_client
+
+
+ @dataclass(frozen=True)
+ class Response:
+     """Immutable response object with strong typing"""
+     status_code: int
+     headers: Headers
+     _content: bytes
+     text: str
+
+     @property
+     def content(self) -> bytes:
+         return self._content
+
+     def json(self) -> JsonType:
+         return json.loads(self.text)
+
+
+ class AsyncRequest(AsyncContextManager['AsyncRequest']):
+     def __init__(self) -> None:
+         self._ssl_context = self._create_optimized_ssl_context()
+         self._client: Optional[httpx.AsyncClient] = None
+
+     @staticmethod
+     @lru_cache(maxsize=1)
+     def _create_optimized_ssl_context() -> ssl.SSLContext:
+         """Create an SSL context optimized for performance"""
+         ctx = ssl.create_default_context()
+         ctx.check_hostname = False
+         ctx.verify_mode = ssl.CERT_NONE
+         ctx.set_alpn_protocols(['http/1.1'])
+         ctx.post_handshake_auth = True
+         return ctx
+
+     async def request(
+         self,
+         url: str,
+         method: HttpMethod = "GET",
+         headers: Optional[Headers] = None,
+         cookies: Optional[Dict[str, str]] = None,
+         params: Optional[Dict[str, Any]] = None,
+         json_data: Optional[JsonType] = None,
+         files: Optional[Dict[str, FileData]] = None,
+         proxy: Optional[str] = None,
+         timeout_request: int = 60,
+         max_attempt: int = 10,
+         force_response: bool = False,
+         json_response: bool = False,
+         json_response_check: Optional[str] = None,
+         skip_response: Optional[Union[str, list[str]]] = None,
+         exception_sleep: float = 10,
+         add_user_agent: bool = False
+     ) -> Optional[Response]:
+         """Execute an HTTP request with typed responses and automatic retry"""
+         # Prepare headers
+         request_headers = dict(headers or {})
+         if add_user_agent:
+             request_headers["User-Agent"] = await _get_user_agent()
+
+         # Initialize the client if not already done
+         if self._client is None:
+             self._client = httpx.AsyncClient(
+                 verify=self._ssl_context,
+                 timeout=timeout_request,
+                 cookies=cookies,
+                 headers=request_headers,
+                 proxy=proxy,
+                 follow_redirects=True,
+                 # http2=True  # Enable HTTP/2 for better performance
+             )
+
+         # Prepare files for multipart/form-data
+         files_dict = None
+         if files:
+             files_dict = {}
+             for field_name, (filename, content, content_type) in files.items():
+                 files_dict[field_name] = (filename, content, content_type)
+
+         # Drop empty query parameters
+         if params:
+             params = {k: v for k, v in params.items() if v}
+         for attempt in range(max_attempt):
+             try:
+                 # Execute the request with all necessary parameters
+                 httpx_response = await self._client.request(
+                     method=method,
+                     url=url,
+                     params=params,
+                     json=json_data,
+                     files=files_dict,
+                 )
+
+                 # Build the custom Response object
+                 response = Response(
+                     status_code=httpx_response.status_code,
+                     headers=dict(httpx_response.headers),
+                     _content=httpx_response.content,
+                     text=httpx_response.text
+                 )
+
+                 # Handle unsuccessful status codes
+                 if response.status_code not in range(200, 300):
+                     logger.warning(
+                         f"Request: {response.status_code}\n"
+                         f"Attempt {attempt + 1}/{max_attempt}\n"
+                         f"Url: {url}\n"
+                         f"Params: {params}\n"
+                         f"Response: {response.text[:1000]}\n"
+                         f"Request data: {json_data}\n"
+                     )
+                     if skip_response:
+                         patterns = [skip_response] if isinstance(
+                             skip_response, str) else skip_response
+                         # Give up early when a known failure pattern appears in the body
+                         if patterns and any(pattern in response.text for pattern in patterns if pattern):
+                             return response if force_response else None
+
+                     if attempt + 1 == max_attempt:
+                         return response if force_response else None
+
+                     await asyncio.sleep(exception_sleep)
+                     continue
+
+                 # Validate the JSON response
+                 if json_response:
+                     try:
+                         data = response.json()
+                         if json_response_check and json_response_check not in data:
+                             if attempt + 1 == max_attempt:
+                                 return None
+                             await asyncio.sleep(exception_sleep)
+                             continue
+                     except json.JSONDecodeError:
+                         if attempt + 1 == max_attempt:
+                             return None
+                         await asyncio.sleep(exception_sleep)
+                         continue
+
+                 return response
+
+             except Exception as e:
+                 logger.error(
+                     f"Request error: {e} - {url} - attempt {attempt + 1}/{max_attempt}")
+                 if attempt + 1 == max_attempt:
+                     return None
+                 await asyncio.sleep(exception_sleep)
+                 continue
+
+         return None
+
+     async def __aenter__(self) -> 'AsyncRequest':
+         """Context manager entry point"""
+         return self
+
+     async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
+         """Context manager exit point"""
+         if self._client:
+             await self._client.aclose()
+             self._client = None
+
+
+ async def make_request(
+     url: str,
+     method: HttpMethod = "GET",
+     headers: Optional[Headers] = None,
+     cookies: Optional[Dict[str, str]] = None,
+     params: Optional[Dict[str, Any]] = None,
+     json_data: Optional[JsonType] = None,
+     files: Optional[Dict[str, FileData]] = None,
+     proxy: Optional[str] = None,
+     timeout_request: int = 60,
+     max_attempt: int = 10,
+     force_response: bool = False,
+     json_response: bool = False,
+     json_response_check: Optional[str] = None,
+     skip_response: Optional[Union[str, list[str]]] = None,
+     exception_sleep: float = 10,
+     add_user_agent: bool = False,
+ ) -> Optional[Response]:
+     """Main function to execute HTTP requests using the shared client for connection reuse"""
+     # Use the shared client to avoid "too many open files" errors
+     # (note: cookies and proxy are accepted but not applied here, since the shared client is created without them)
+     client = await _get_shared_client()
+
+     # Prepare headers
+     request_headers = dict(headers or {})
+     if add_user_agent:
+         request_headers["User-Agent"] = await _get_user_agent()
+
+     # Prepare files for multipart/form-data
+     files_dict = None
+     if files:
+         files_dict = {}
+         for field_name, (filename, content, content_type) in files.items():
+             files_dict[field_name] = (filename, content, content_type)
+
+     # Drop empty query parameters
+     if params:
+         params = {k: v for k, v in params.items() if v}
+
+     for attempt in range(max_attempt):
+         try:
+             # Execute the request with all necessary parameters
+             httpx_response = await client.request(
+                 method=method,
+                 url=url,
+                 params=params,
+                 json=json_data,
+                 files=files_dict,
+                 headers=request_headers,
+                 timeout=timeout_request,
+             )
+
+             # Build the custom Response object
+             response = Response(
+                 status_code=httpx_response.status_code,
+                 headers=dict(httpx_response.headers),
+                 _content=httpx_response.content,
+                 text=httpx_response.text
+             )
+
+             # Handle unsuccessful status codes
+             if response.status_code not in range(200, 300):
+                 logger.warning(
+                     f"Request: {response.status_code}\n"
+                     f"Attempt {attempt + 1}/{max_attempt}\n"
+                     f"Url: {url}\n"
+                     f"Params: {params}\n"
+                     f"Response: {response.text[:1000]}\n"
+                     f"Request data: {json_data}\n"
+                 )
+                 if skip_response:
+                     patterns = [skip_response] if isinstance(
+                         skip_response, str) else skip_response
+                     if patterns and any(pattern in response.text for pattern in patterns if pattern):
+                         return response if force_response else None
+
+                 if attempt + 1 == max_attempt:
+                     return response if force_response else None
+
+                 await asyncio.sleep(exception_sleep)
+                 continue
+
+             # Validate the JSON response
+             if json_response:
+                 try:
+                     data = response.json()
+                     if json_response_check and json_response_check not in data:
+                         if attempt + 1 == max_attempt:
+                             return None
+                         await asyncio.sleep(exception_sleep)
+                         continue
+                 except json.JSONDecodeError:
+                     if attempt + 1 == max_attempt:
+                         return None
+                     await asyncio.sleep(exception_sleep)
+                     continue
+
+             return response
+
+         except Exception as e:
+             logger.error(
+                 f"Request error: {e} - {url} - attempt {attempt + 1}/{max_attempt}")
+             if attempt + 1 == max_attempt:
+                 return None
+             await asyncio.sleep(exception_sleep)
+             continue
+
+     return None
+
+
+ @lru_cache(maxsize=1)
+ def _get_session_cffi() -> AsyncSession:
+     """Cached session factory with optimized settings."""
+     return AsyncSession(
+         impersonate="chrome",
+         timeout=30.0,
+         headers={'User-Agent': 'Mozilla/5.0 (compatible; Scraper)'}
+     )
+
+
+ async def make_request_cffi(url: str) -> Optional[str]:
+     """Optimized HTTP client with connection reuse and error handling."""
+     try:
+         response = await _get_session_cffi().get(url)
+         response.raise_for_status()
+         return response.text
+     except Exception:
+         return None
+
+
+ async def test_proxy():
+     async with httpx.AsyncClient(proxy="http://0ce896d23159e7829ffc__cr.us:e4ada3ce93ad55ca@gw.dataimpesulse.com:823", timeout=10, verify=False) as client:
+         try:
+             r = await client.get("https://api.geckoterminal.com/api/v2/networks/zora-network/trending_pools?include=base_token%2C%20quote_token%2C%20dex&page=1")
+             print(f"Proxy test: {r.status_code} {r.text}")
+         except Exception as e:
+             print(f"Proxy test failed: {e}")
+
+
+ async def test_make_request_cffi():
+     url = "https://gmgn.ai/eth/token/0xeee2a64ae321964f969299ced0f4fcadcb0a1141"
+     r = await make_request_cffi(url)
+     print(r)
+
+
+ if __name__ == "__main__":
+     # asyncio.run(make_request("https://api.geckoterminal.com/api/v2/networks/zora-network/trending_pools?include=base_token%2C%20quote_token%2C%20dex&page=1", method="GET"))
+     # asyncio.run(test_proxy())
+     asyncio.run(test_make_request_cffi())
esuls/utils.py ADDED
@@ -0,0 +1,29 @@
+ """
+ General utilities - no external dependencies required
+ """
+ import asyncio
+ from typing import Awaitable, Callable, List, TypeVar
+
+ T = TypeVar("T")
+
+
+ async def run_parallel(
+     *functions: Callable[[], Awaitable[T]],
+     limit: int = 20
+ ) -> List[T]:
+     """
+     Run multiple async functions in parallel with a concurrency limit.
+     """
+     semaphore = asyncio.Semaphore(limit)
+
+     async def limited_function(func: Callable[[], Awaitable[T]]) -> T:
+         async with semaphore:
+             return await func()
+
+     tasks = [asyncio.create_task(limited_function(func)) for func in functions]
+
+     # Results are gathered in completion order, not submission order
+     results = []
+     for fut in asyncio.as_completed(tasks):
+         results.append(await fut)
+
+     return results
esuls-0.1.0.dist-info/METADATA ADDED
@@ -0,0 +1,290 @@
+ Metadata-Version: 2.4
+ Name: esuls
+ Version: 0.1.0
+ Summary: Utility library for async database operations, HTTP requests, and parallel execution
+ Author-email: IperGiove <ipergiove@gmail.com>
+ License: MIT
+ Project-URL: Homepage, https://github.com/ipergiove/esuls
+ Project-URL: Repository, https://github.com/ipergiove/esuls
+ Classifier: Programming Language :: Python :: 3
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Operating System :: OS Independent
+ Requires-Python: >=3.14
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: aiosqlite>=0.21.0
+ Requires-Dist: curl-cffi>=0.13.0
+ Requires-Dist: fake-useragent>=2.2.0
+ Requires-Dist: httpx>=0.28.1
+ Requires-Dist: loguru>=0.7.3
+ Dynamic: license-file
+
+ # esuls
+
+ A Python utility library for async database operations, HTTP requests, and parallel execution.
+
+ ## Features
+
+ - **AsyncDB** - Type-safe async SQLite with dataclass schemas
+ - **Async HTTP client** - High-performance HTTP client with retry logic and connection pooling
+ - **Parallel utilities** - Async parallel execution with concurrency control
+ - **CloudFlare bypass** - curl-cffi integration for bypassing protections
+
+ ## Installation
+
+ ```bash
+ # With pip
+ pip install esuls
+
+ # With uv
+ uv pip install esuls
+ ```
+
+ ## Usage
+
+ ### Parallel Execution
+
+ ```python
+ import asyncio
+ from esuls import run_parallel
+
+ async def fetch_data(id):
+     await asyncio.sleep(1)
+     return f"Data {id}"
+
+ async def main():
+     # Run multiple async functions in parallel with a concurrency limit
+     results = await run_parallel(
+         lambda: fetch_data(1),
+         lambda: fetch_data(2),
+         lambda: fetch_data(3),
+         limit=20  # Max concurrent tasks
+     )
+     print(results)
+
+ asyncio.run(main())
+ ```
+
+ Note that results are collected in completion order, not submission order; see the sketch below for preserving submission order.
+
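+ If submission order matters, a small workaround (a sketch, not a package API) is to tag each call with its index and sort afterwards:
+
+ ```python
+ import asyncio
+ from esuls import run_parallel
+
+ async def fetch_data(id):
+     await asyncio.sleep(1)
+     return f"Data {id}"
+
+ async def indexed(i):
+     # Pair each result with its submission index
+     return i, await fetch_data(i)
+
+ async def main():
+     pairs = await run_parallel(*[lambda i=i: indexed(i) for i in range(3)])
+     results = [value for _, value in sorted(pairs)]
+     print(results)  # ['Data 0', 'Data 1', 'Data 2']
+
+ asyncio.run(main())
+ ```
+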
+ ### Database Client (AsyncDB)
+
+ ```python
+ import asyncio
+ from dataclasses import dataclass, field
+ from esuls import AsyncDB, BaseModel
+
+ # Define your schema (fields need defaults, because BaseModel's fields have them)
+ @dataclass
+ class User(BaseModel):
+     name: str = field(default="", metadata={"index": True})
+     email: str = field(default="", metadata={"unique": True})
+     age: int = 0
+
+ async def main():
+     # Initialize the database
+     db = AsyncDB(db_path="users.db", table_name="users", schema_class=User)
+
+     # Save data
+     user = User(name="Alice", email="alice@example.com", age=30)
+     await db.save(user)
+
+     # Save multiple items
+     users = [
+         User(name="Bob", email="bob@example.com", age=25),
+         User(name="Charlie", email="charlie@example.com", age=35)
+     ]
+     await db.save_batch(users)
+
+     # Query data
+     results = await db.find(name="Alice")
+     print(results)
+
+     # Query with filters
+     adults = await db.find(age__gte=18, order_by="-age")
+
+     # Count
+     count = await db.count(age__gte=18)
+
+     # Get by ID
+     fetched = await db.get_by_id(user.id)
+
+     # Delete
+     await db.delete(user.id)
+
+ asyncio.run(main())
+ ```
+
+ **Query Operators** (combined in the sketch after this list):
+ - `field__eq` - Equal (default)
+ - `field__gt` - Greater than
+ - `field__gte` - Greater than or equal
+ - `field__lt` - Less than
+ - `field__lte` - Less than or equal
+ - `field__neq` - Not equal
+ - `field__like` - SQL LIKE
+ - `field__in` - IN operator (pass a list)
+
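+ A quick sketch combining several operators on the `User` schema above (illustrative, not part of the package; it assumes the `db` instance inside `main()` from the previous example):
+
+ ```python
+ # Adults up to 65 whose name starts with "A", newest first;
+ # a leading "-" in order_by means DESC
+ matches = await db.find(
+     age__gte=18,
+     age__lte=65,
+     name__like="A%",
+     order_by="-created_at",
+ )
+
+ # IN takes a list and expands to one placeholder per element
+ known = await db.find(email__in=["alice@example.com", "bob@example.com"])
+ ```
+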
+ ### HTTP Request Client
+
+ ```python
+ import asyncio
+ from esuls import AsyncRequest, make_request
+
+ # Using the context manager (recommended for multiple requests)
+ async def example1():
+     async with AsyncRequest() as client:
+         response = await client.request(
+             url="https://api.example.com/data",
+             method="GET",
+             add_user_agent=True,
+             max_attempt=3,
+             timeout_request=30
+         )
+         if response:
+             data = response.json()
+             print(data)
+
+ # Using the standalone function (uses the shared connection pool)
+ async def example2():
+     response = await make_request(
+         url="https://api.example.com/users",
+         method="POST",
+         json_data={"name": "Alice", "email": "alice@example.com"},
+         headers={"Authorization": "Bearer token"},
+         max_attempt=5,
+         force_response=True  # Return the response even on error
+     )
+     if response:
+         print(response.status_code)
+         print(response.text)
+
+ asyncio.run(example1())
+ ```
+
+ **Request Parameters** (a retry/validation sketch follows this list):
+ - `url` - Request URL
+ - `method` - HTTP method (GET, POST, PUT, DELETE, etc.)
+ - `headers` - Request headers
+ - `cookies` - Cookies dict
+ - `params` - URL parameters
+ - `json_data` - JSON body
+ - `files` - Multipart file upload
+ - `proxy` - Proxy URL
+ - `timeout_request` - Timeout in seconds (default: 60)
+ - `max_attempt` - Max retry attempts (default: 10)
+ - `force_response` - Return the response even on error (default: False)
+ - `json_response` - Validate the JSON response (default: False)
+ - `json_response_check` - Require a key in the JSON response
+ - `skip_response` - Give up early if a failing body contains the pattern(s)
+ - `exception_sleep` - Delay between retries in seconds (default: 10)
+ - `add_user_agent` - Add a random User-Agent header (default: False)
+
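+ For example, a hedged sketch of the retry and validation knobs together (the URL and the `"data"` key are placeholders, not a real API):
+
+ ```python
+ import asyncio
+ from esuls import make_request
+
+ async def fetch_until_valid():
+     # Retries up to 5 times, 2s apart, until the body parses as JSON and
+     # contains a top-level "data" key (hypothetical); if a failing body
+     # mentions "rate limit", it gives up immediately instead of retrying.
+     response = await make_request(
+         url="https://api.example.com/items",  # placeholder URL
+         json_response=True,
+         json_response_check="data",
+         skip_response="rate limit",
+         max_attempt=5,
+         exception_sleep=2,
+     )
+     # Without force_response, a non-None result is a 2xx response with valid JSON
+     return response.json()["data"] if response else None
+
+ asyncio.run(fetch_until_valid())
+ ```
+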
+ ### CloudFlare Bypass
+
+ ```python
+ import asyncio
+ from esuls import make_request_cffi
+
+ async def fetch_protected_page():
+     html = await make_request_cffi("https://protected-site.com")
+     if html:
+         print(html)
+
+ asyncio.run(fetch_protected_page())
+ ```
+
+ ## Development
+
+ ### Project Structure
+
+ ```
+ utils/
+ ├── pyproject.toml
+ ├── README.md
+ ├── LICENSE
+ └── src/
+     └── esuls/
+         ├── __init__.py
+         ├── utils.py        # Parallel execution utilities
+         ├── db_cli.py       # AsyncDB with dataclass schemas
+         └── request_cli.py  # Async HTTP client
+ ```
+
+ ### Local Development Installation
+
+ ```bash
+ # Navigate to the project
+ cd utils
+
+ # Install in editable mode with uv
+ uv pip install -e .
+
+ # Or with pip
+ pip install -e .
+ ```
+
+ ### Building and Publishing
+
+ ```bash
+ # With uv
+ uv build
+ twine upload dist/*
+
+ # Or with traditional tools
+ pip install build twine
+ python -m build
+ twine upload dist/*
+ ```
+
+ ## Advanced Features
+
+ ### AsyncDB Schema Definition
+
+ ```python
+ from dataclasses import dataclass, field
+ from esuls import BaseModel
+ from datetime import datetime
+ from typing import Optional, List
+ import enum
+
+ class Status(enum.Enum):
+     ACTIVE = "active"
+     INACTIVE = "inactive"
+
+ @dataclass
+ class User(BaseModel):
+     # BaseModel provides: id, created_at, updated_at
+
+     # Indexed, unique field (a default is required after BaseModel's defaulted fields)
+     email: str = field(default="", metadata={"index": True, "unique": True})
+
+     # Simple fields
+     name: str = ""
+     age: int = 0
+
+     # Enum support
+     status: Status = Status.ACTIVE
+
+     # JSON-serialized complex types
+     tags: List[str] = field(default_factory=list)
+
+     # Optional fields
+     phone: Optional[str] = None
+
+     # Table constraints (optional)
+     __table_constraints__ = [
+         "CHECK (age >= 0)"
+     ]
+ ```
+
+ ### Connection Pooling & Performance
+
+ The HTTP client uses (composed in the sketch after this list):
+ - A shared connection pool (prevents "too many open files" errors)
+ - Automatic retry with a configurable fixed delay between attempts
+ - SSL optimization
+ - Random User-Agent rotation
+ - Cookie and header persistence
+
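+ The pieces compose naturally; a hedged sketch fanning many requests through the shared pool (the URL is a placeholder):
+
+ ```python
+ import asyncio
+ from esuls import make_request, run_parallel
+
+ async def fetch_page(page: int):
+     # Each call reuses the module-level pooled httpx client
+     return await make_request(
+         url="https://api.example.com/items",  # placeholder URL
+         params={"page": page},
+         max_attempt=3,
+     )
+
+ async def main():
+     responses = await run_parallel(
+         *[lambda p=p: fetch_page(p) for p in range(1, 51)],
+         limit=10,  # at most 10 requests in flight
+     )
+     print(sum(1 for r in responses if r))
+
+ asyncio.run(main())
+ ```
+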
+ ## License
+
+ MIT License
esuls-0.1.0.dist-info/RECORD ADDED
@@ -0,0 +1,9 @@
+ esuls/__init__.py,sha256=z77yDz8qY_y0dTvy76Sg8_oyNSLSBE8zE1d49EBUb30,467
+ esuls/db_cli.py,sha256=YOPBlKpLV4GG5WzcHov6hpZkmW-vaL1BNlBcAm8bz4k,17424
+ esuls/request_cli.py,sha256=8wt2MQ4Y3J-vEVacizbbuHLSrENm41UhnnGGzi-DfDE,14277
+ esuls/utils.py,sha256=R0peIanodvDrKYFWWdLZ9weIPAUZX787XIjZH40qNo0,677
+ esuls-0.1.0.dist-info/licenses/LICENSE,sha256=AY0N01ARt0kbKB7CkByYLqqNQU-yalb-rpv-eXITEWA,1066
+ esuls-0.1.0.dist-info/METADATA,sha256=B8AsFkbt5UHrtd3RX_4MUXubvoPd-XyxLX6MIc5jVuE,6928
+ esuls-0.1.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ esuls-0.1.0.dist-info/top_level.txt,sha256=WWBDHRhQ0DQLBZKD7Un8uFN93GvVQnP4WvJKkvbACVA,6
+ esuls-0.1.0.dist-info/RECORD,,
esuls-0.1.0.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
+ Wheel-Version: 1.0
+ Generator: setuptools (80.9.0)
+ Root-Is-Purelib: true
+ Tag: py3-none-any
+
esuls-0.1.0.dist-info/licenses/LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2025 IperGiove
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
esuls-0.1.0.dist-info/top_level.txt ADDED
@@ -0,0 +1 @@
+ esuls