mm-std 0.4.18__py3-none-any.whl → 0.5.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,126 +0,0 @@
- from __future__ import annotations
-
- import asyncio
- import logging
- from collections.abc import Awaitable
- from dataclasses import dataclass
- from typing import Any
-
- logger = logging.getLogger(__name__)
-
-
- class AsyncTaskRunner:
-     """
-     AsyncTaskRunner executes a batch of asynchronous tasks with controlled concurrency.
-     Note: This runner is designed for one-time use. Create a new instance for each batch of tasks.
-     """
-
-     @dataclass
-     class Result:
-         results: dict[str, Any]  # Maps task_id to result
-         exceptions: dict[str, Any]  # Maps task_id to exception (if any)
-         is_ok: bool  # True if no exception and no timeout occurred
-         is_timeout: bool  # True if at least one task was cancelled due to timeout
-
-     @dataclass
-     class Task:
-         """Individual task representation"""
-
-         task_id: str
-         awaitable: Awaitable[Any]
-
-     def __init__(
-         self, max_concurrent_tasks: int, timeout: float | None = None, name: str | None = None, no_logging: bool = False
-     ) -> None:
-         """
-         :param max_concurrent_tasks: Maximum number of tasks that can run concurrently.
-         :param timeout: Optional overall timeout in seconds for running all tasks.
-         :param name: Optional name for the runner.
-         :param no_logging: If True, suppresses logging for task exception.
-         """
-         if timeout is not None and timeout <= 0:
-             raise ValueError("Timeout must be positive if specified.")
-         self.max_concurrent_tasks: int = max_concurrent_tasks
-         self.timeout: float | None = timeout
-         self.name = name
-         self.no_logging = no_logging
-         self.semaphore: asyncio.Semaphore = asyncio.Semaphore(max_concurrent_tasks)
-         self._tasks: list[AsyncTaskRunner.Task] = []
-         self._was_run: bool = False
-         self._task_ids: set[str] = set()
-
-     def add_task(
-         self,
-         task_id: str,
-         awaitable: Awaitable[Any],
-     ) -> None:
-         """
-         Adds a task to the runner that will be executed when run() is called.
-
-         :param task_id: Unique identifier for the task.
-         :param awaitable: The awaitable (coroutine) to execute.
-         :raises RuntimeError: If the runner has already been used.
-         :raises ValueError: If task_id is empty or already exists.
-         """
-         if self._was_run:
-             raise RuntimeError("This AsyncTaskRunner has already been used. Create a new instance for new tasks.")
-
-         if not task_id:
-             raise ValueError("Task ID cannot be empty")
-
-         if task_id in self._task_ids:
-             raise ValueError(f"Task ID '{task_id}' already exists. All task IDs must be unique.")
-
-         self._task_ids.add(task_id)
-         self._tasks.append(AsyncTaskRunner.Task(task_id, awaitable))
-
-     def _task_name(self, task_id: str) -> str:
-         return f"{self.name}-{task_id}" if self.name else task_id
-
-     async def run(self) -> AsyncTaskRunner.Result:
-         """
-         Executes all added tasks with concurrency limited by the semaphore.
-         If a timeout is specified, non-finished tasks are cancelled.
-
-         :return: AsyncTaskRunner.Result containing task results, exceptions, and flags indicating overall status.
-         :raises RuntimeError: If the runner has already been used.
-         """
-         if self._was_run:
-             raise RuntimeError("This AsyncTaskRunner instance can only be run once. Create a new instance for new tasks.")
-
-         self._was_run = True
-         results: dict[str, Any] = {}
-         exceptions: dict[str, Any] = {}
-         is_timeout: bool = False
-
-         async def run_task(task: AsyncTaskRunner.Task) -> None:
-             async with self.semaphore:
-                 try:
-                     res: Any = await task.awaitable
-                     results[task.task_id] = res
-                 except Exception as e:
-                     if not self.no_logging:
-                         logger.exception("Task raised an exception", extra={"task_id": task.task_id})
-                     exceptions[task.task_id] = e
-
-         # Create asyncio tasks for all runner tasks
-         tasks = [asyncio.create_task(run_task(task), name=self._task_name(task.task_id)) for task in self._tasks]
-
-         try:
-             if self.timeout is not None:
-                 # Run with timeout
-                 await asyncio.wait_for(asyncio.gather(*tasks), timeout=self.timeout)
-             else:
-                 # Run without timeout
-                 await asyncio.gather(*tasks)
-         except TimeoutError:
-             # Cancel all running tasks on timeout
-             for task in tasks:
-                 if not task.done():
-                     task.cancel()
-             # Wait for tasks to complete cancellation
-             await asyncio.gather(*tasks, return_exceptions=True)
-             is_timeout = True
-
-         is_ok: bool = (not exceptions) and (not is_timeout)
-         return AsyncTaskRunner.Result(results=results, exceptions=exceptions, is_ok=is_ok, is_timeout=is_timeout)
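AsyncTaskRunner is deleted outright in 0.5.1, so downstream code that batched coroutines through it needs a replacement. As a migration reference, here is a minimal usage sketch reconstructed from the deleted source above; the `from mm_std import AsyncTaskRunner` path is an assumption, since the diff does not show the package's `__init__.py`.

```python
import asyncio

from mm_std import AsyncTaskRunner  # import path assumed; the diff shows only the module body


async def fetch(value: int) -> int:
    await asyncio.sleep(0.1)
    if value == 3:
        raise ValueError("boom")
    return value * 2


async def main() -> None:
    # At most 2 coroutines run at once; the whole batch must finish within 5 seconds.
    runner = AsyncTaskRunner(max_concurrent_tasks=2, timeout=5.0, name="demo")
    for i in range(5):
        runner.add_task(f"task-{i}", fetch(i))

    result = await runner.run()
    print(result.results)     # {"task-0": 0, "task-1": 2, "task-2": 4, "task-4": 8}
    print(result.exceptions)  # {"task-3": ValueError("boom")}
    print(result.is_ok, result.is_timeout)  # False False (one task failed, no timeout)


asyncio.run(main())
```

The one-shot contract is worth noting: `run()` sets `_was_run`, so reusing an instance raises `RuntimeError` instead of silently re-awaiting already-consumed coroutines.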
@@ -1,35 +0,0 @@
- import functools
- from collections import defaultdict
- from collections.abc import Callable
- from threading import Lock
-
-
- def synchronized_parameter[T, **P](arg_index: int = 0, skip_if_locked: bool = False) -> Callable[..., Callable[P, T | None]]:
-     locks: dict[object, Lock] = defaultdict(Lock)
-
-     def outer(func: Callable[P, T]) -> Callable[P, T | None]:
-         @functools.wraps(func)
-         def wrapper(*args: P.args, **kwargs: P.kwargs) -> T | None:
-             if skip_if_locked and locks[args[arg_index]].locked():
-                 return None
-             try:
-                 with locks[args[arg_index]]:
-                     return func(*args, **kwargs)
-             finally:
-                 locks.pop(args[arg_index], None)
-
-         wrapper.locks = locks  # type: ignore[attr-defined]
-         return wrapper
-
-     return outer
-
-
- def synchronized[T, **P](fn: Callable[P, T]) -> Callable[P, T]:
-     lock = Lock()
-
-     @functools.wraps(fn)
-     def wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
-         with lock:
-             return fn(*args, **kwargs)
-
-     return wrapper
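The two lock decorators are also removed. They use PEP 695 generic syntax (`def synchronized[T, **P]`), which requires Python 3.12+. A hypothetical sketch of how they were applied, again assuming the top-level `mm_std` import path:

```python
import threading

from mm_std import synchronized, synchronized_parameter  # import path assumed

counter = 0


@synchronized
def bump() -> None:
    # The whole function body runs under one shared Lock.
    global counter
    counter += 1


@synchronized_parameter(arg_index=0, skip_if_locked=True)
def process(user_id: str) -> str:
    # One Lock per distinct first argument; contended calls for the
    # same user_id return None instead of blocking.
    return f"processed {user_id}"


threads = [threading.Thread(target=bump) for _ in range(100)]
for t in threads:
    t.start()
for t in threads:
    t.join()
print(counter)           # 100
print(process("alice"))  # "processed alice" (or None if another thread holds alice's lock)
```

Because `synchronized_parameter` keys its locks on `args[arg_index]`, it serializes calls per argument value rather than globally.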
@@ -1,73 +0,0 @@
- import time
- from dataclasses import dataclass, field
- from datetime import datetime
- from logging import Logger
- from threading import Thread
-
- from mm_std.date import is_too_old, utc_now
- from mm_std.types_ import Func
-
-
- class Scheduler:
-     def __init__(self, log: Logger, loop_delay: float = 0.5, debug: bool = False) -> None:
-         self.log = log
-         self.debug = debug
-         self.loop_delay = loop_delay
-         self.stopped = False
-         self.jobs: list[Scheduler.Job] = []
-         self.run_immediately_jobs: list[Scheduler.Job] = []
-         self._debug("init")
-
-     @dataclass
-     class Job:
-         func: Func
-         args: tuple[object, ...]
-         interval: int
-         is_running: bool = False
-         last_at: datetime = field(default_factory=utc_now)
-
-         def __str__(self) -> str:
-             return str(self.func)
-
-     def add_job(self, func: Func, interval: int, args: tuple[object, ...] = (), run_immediately: bool = False) -> None:
-         job = Scheduler.Job(func, args, interval)
-         self.jobs.append(job)
-         if run_immediately:
-             self.run_immediately_jobs.append(job)
-
-     def _run_job(self, job: Job) -> None:
-         self._debug(f"_run_job: {job}")
-         if self.stopped:
-             return
-         try:
-             job.func(*job.args)
-             self._debug(f"_run_job: {job} done")
-         except Exception:
-             self.log.exception("scheduler error")
-             self._debug(f"_run_job: {job} error")
-         finally:
-             job.is_running = False
-
-     def _start(self) -> None:
-         self._debug(f"_start: jobs={len(self.jobs)}, run_immediately_jobs={len(self.run_immediately_jobs)}")
-         for j in self.run_immediately_jobs:
-             j.is_running = True
-             j.last_at = utc_now()
-             Thread(target=self._run_job, args=(j,)).start()
-         while not self.stopped:
-             for j in self.jobs:
-                 if not j.is_running and is_too_old(j.last_at, j.interval):
-                     j.is_running = True
-                     j.last_at = utc_now()
-                     Thread(target=self._run_job, args=(j,)).start()
-             time.sleep(self.loop_delay)
-
-     def _debug(self, message: str) -> None:
-         if self.debug:
-             self.log.debug("Scheduler: %s", message)
-
-     def start(self) -> None:
-         Thread(target=self._start).start()
-
-     def stop(self) -> None:
-         self.stopped = True
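The thread-based Scheduler is gone as well. A minimal usage sketch, assuming the top-level import path; `interval` is read as seconds, inferred from the `is_too_old(j.last_at, j.interval)` check above:

```python
import logging
import time

from mm_std import Scheduler  # import path assumed


def heartbeat(tag: str) -> None:
    print(f"heartbeat {tag}")


scheduler = Scheduler(log=logging.getLogger("jobs"), loop_delay=0.5, debug=True)
# Run heartbeat("db") every 10 seconds, firing once immediately on start.
scheduler.add_job(heartbeat, interval=10, args=("db",), run_immediately=True)
scheduler.start()  # the polling loop runs in a background thread

time.sleep(30)
scheduler.stop()  # sets the flag; the loop exits after its current loop_delay sleep
```

Each job invocation runs in a fresh `Thread`, and the `is_running` flag prevents a slow job from overlapping with itself.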
@@ -1,45 +0,0 @@
- import concurrent
- from concurrent.futures import ThreadPoolExecutor
- from dataclasses import dataclass
-
- from mm_std.types_ import Args, Func, Kwargs
-
-
- class ConcurrentTasks:
-     def __init__(self, max_workers: int = 5, timeout: int | None = None, thread_name_prefix: str = "concurrent_tasks") -> None:
-         self.max_workers = max_workers
-         self.timeout = timeout
-         self.thread_name_prefix = thread_name_prefix
-         self.tasks: list[ConcurrentTasks.Task] = []
-         self.exceptions: dict[str, Exception] = {}
-         self.error = False
-         self.timeout_error = False
-         self.result: dict[str, object] = {}
-
-     @dataclass
-     class Task:
-         key: str
-         func: Func
-         args: Args
-         kwargs: Kwargs
-
-     def add_task(self, key: str, func: Func, args: Args = (), kwargs: Kwargs | None = None) -> None:
-         if kwargs is None:
-             kwargs = {}
-         self.tasks.append(ConcurrentTasks.Task(key, func, args, kwargs))
-
-     def execute(self) -> None:
-         with ThreadPoolExecutor(self.max_workers, thread_name_prefix=self.thread_name_prefix) as executor:
-             future_to_key = {executor.submit(task.func, *task.args, **task.kwargs): task.key for task in self.tasks}
-             try:
-                 result_map = concurrent.futures.as_completed(future_to_key, timeout=self.timeout)
-                 for future in result_map:
-                     key = future_to_key[future]
-                     try:
-                         self.result[key] = future.result()
-                     except Exception as err:
-                         self.error = True
-                         self.exceptions[key] = err
-             except concurrent.futures.TimeoutError:
-                 self.error = True
-                 self.timeout_error = True
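ConcurrentTasks is the thread-pool counterpart of AsyncTaskRunner: results and failures land on instance attributes rather than a returned object. A usage sketch under the same import-path assumption:

```python
import time

from mm_std import ConcurrentTasks  # import path assumed


def slow_square(n: int) -> int:
    time.sleep(0.2)
    return n * n


tasks = ConcurrentTasks(max_workers=3, timeout=5)
for n in range(5):
    tasks.add_task(f"square-{n}", slow_square, args=(n,))
tasks.execute()  # blocks until all futures finish or the 5s timeout fires

if tasks.timeout_error:
    print("batch timed out")
print(tasks.result)      # {"square-0": 0, "square-1": 1, "square-2": 4, ...}
print(tasks.exceptions)  # per-key exceptions, empty here
```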
mm_std/config.py DELETED
@@ -1,81 +0,0 @@
- import sys
- import tomllib
- from pathlib import Path
- from typing import Any, NoReturn, Self, TypeVar
-
- from pydantic import BaseModel, ConfigDict, ValidationError
-
- from .print_ import print_json, print_plain
- from .result import Result
- from .zip import read_text_from_zip_archive
-
- T = TypeVar("T", bound="BaseConfig")
-
-
- class BaseConfig(BaseModel):
-     model_config = ConfigDict(extra="forbid")
-
-     def print_and_exit(self, exclude: set[str] | None = None, count: set[str] | None = None) -> NoReturn:
-         data = self.model_dump(exclude=exclude)
-         if count:
-             for k in count:
-                 data[k] = len(data[k])
-         print_json(data)
-         sys.exit(0)
-
-     @classmethod
-     def read_toml_config_or_exit(cls, config_path: Path, zip_password: str = "") -> Self:  # nosec
-         res: Result[Self] = cls.read_toml_config(config_path, zip_password)
-         if res.is_ok():
-             return res.unwrap()
-         cls._print_error_and_exit(res)
-
-     @classmethod
-     async def read_toml_config_or_exit_async(cls, config_path: Path, zip_password: str = "") -> Self:  # nosec
-         res: Result[Self] = await cls.read_toml_config_async(config_path, zip_password)
-         if res.is_ok():
-             return res.unwrap()
-         cls._print_error_and_exit(res)
-
-     @classmethod
-     def read_toml_config(cls, config_path: Path, zip_password: str = "") -> Result[Self]:  # nosec
-         try:
-             config_path = config_path.expanduser()
-             if config_path.name.endswith(".zip"):
-                 data = tomllib.loads(read_text_from_zip_archive(config_path, password=zip_password))
-             else:
-                 with config_path.open("rb") as f:
-                     data = tomllib.load(f)
-             return Result.ok(cls(**data))
-         except ValidationError as e:
-             return Result.err(("validator_error", e), extra={"errors": e.errors()})
-         except Exception as e:
-             return Result.err(e)
-
-     @classmethod
-     async def read_toml_config_async(cls, config_path: Path, zip_password: str = "") -> Result[Self]:  # nosec
-         try:
-             config_path = config_path.expanduser()
-             if config_path.name.endswith(".zip"):
-                 data = tomllib.loads(read_text_from_zip_archive(config_path, password=zip_password))
-             else:
-                 with config_path.open("rb") as f:
-                     data = tomllib.load(f)
-             model = await cls.model_validate(data)  # type:ignore[misc]
-             return Result.ok(model)
-         except ValidationError as e:
-             return Result.err(("validator_error", e), extra={"errors": e.errors()})
-         except Exception as e:
-             return Result.err(e)
-
-     @classmethod
-     def _print_error_and_exit(cls, res: Result[Any]) -> NoReturn:
-         if res.error == "validator_error" and res.extra:
-             print_plain("config validation errors")
-             for e in res.extra["errors"]:
-                 loc = e["loc"]
-                 field = ".".join(str(lo) for lo in loc) if len(loc) > 0 else ""
-                 print_plain(f"{field} {e['msg']}")
-         else:
-             print_plain(f"can't parse config file: {res.error} {res.exception}")
-         sys.exit(1)
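BaseConfig bundled TOML loading (optionally from a password-protected zip archive) with pydantic validation. A sketch of the synchronous path, mirroring the `Result` API (`is_ok`/`unwrap`/`error`) used in the source above; the `AppConfig` model and `app.toml` file are hypothetical:

```python
from pathlib import Path

from mm_std import BaseConfig  # import path assumed


class AppConfig(BaseConfig):
    # extra="forbid" is inherited, so unknown TOML keys fail validation
    host: str
    port: int = 8080


res = AppConfig.read_toml_config(Path("~/app.toml"))  # "~" is expanded inside
if res.is_ok():
    config = res.unwrap()
    print(config.host, config.port)
else:
    print("config error:", res.error)

# Or let the helper print validation errors and sys.exit(1) on failure:
# config = AppConfig.read_toml_config_or_exit(Path("~/app.toml"))
```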
mm_std/crypto/__init__.py DELETED
File without changes
mm_std/crypto/fernet.py DELETED
@@ -1,13 +0,0 @@
- from cryptography.fernet import Fernet
-
-
- def fernet_generate_key() -> str:
-     return Fernet.generate_key().decode()
-
-
- def fernet_encrypt(*, data: str, key: str) -> str:
-     return Fernet(key).encrypt(data.encode()).decode()
-
-
- def fernet_decrypt(*, encoded_data: str, key: str) -> str:
-     return Fernet(key).decrypt(encoded_data).decode()
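The fernet helpers are thin keyword-only wrappers over `cryptography.fernet.Fernet`, working in strings instead of bytes. A round-trip sketch, import path assumed:

```python
from mm_std import fernet_decrypt, fernet_encrypt, fernet_generate_key  # import path assumed

key = fernet_generate_key()                         # url-safe base64 string
token = fernet_encrypt(data="top secret", key=key)  # parameters are keyword-only
plain = fernet_decrypt(encoded_data=token, key=key)
assert plain == "top secret"
```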
mm_std/crypto/openssl.py DELETED
@@ -1,109 +0,0 @@
- import base64
- import secrets
- from hashlib import pbkdf2_hmac
-
- from cryptography.hazmat.primitives import padding
- from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
-
-
- class OpensslAes256Cbc:
-     """
-     AES-256-CBC encryption/decryption compatible with OpenSSL's `enc -aes-256-cbc -pbkdf2 -iter 1000000`.
-
-     Provides both raw-byte and Base64-encoded interfaces:
-       • encrypt_bytes / decrypt_bytes: work with bytes
-       • encrypt_base64 / decrypt_base64: work with Base64 strings
-
-     Usage:
-         >>> cipher = OpensslAes256Cbc(password="mypassword")
-         >>> # raw bytes
-         >>> ciphertext = cipher.encrypt_bytes(b"secret")
-         >>> plaintext = cipher.decrypt_bytes(ciphertext)
-         >>> # Base64 convenience
-         >>> token = cipher.encrypt_base64("secret message")
-         >>> result = cipher.decrypt_base64(token)
-         >>> print(result)
-         secret message
-
-     OpenSSL compatibility:
-         echo "secret message" |
-             openssl enc -aes-256-cbc -pbkdf2 -iter 1000000 -salt -base64 -pass pass:mypassword
-
-         echo "U2FsdGVkX1/dGGdg6SExWgtKxvuLroWqhezy54aTt1g=" |
-             openssl enc -d -aes-256-cbc -pbkdf2 -iter 1000000 -base64 -pass pass:mypassword
-     """
-
-     MAGIC_HEADER = b"Salted__"
-     SALT_SIZE = 8
-     KEY_SIZE = 32  # AES-256
-     IV_SIZE = 16  # AES block size
-     ITERATIONS = 1_000_000
-     HEADER_LEN = len(MAGIC_HEADER)
-
-     def __init__(self, password: str) -> None:
-         """
-         Initialize the cipher with password. Uses a fixed iteration count of 1,000,000.
-
-         Args:
-             password: Password for encryption/decryption
-         """
-         self._password = password.encode("utf-8")
-
-     def _derive_key_iv(self, salt: bytes) -> tuple[bytes, bytes]:
-         key_iv = pbkdf2_hmac(
-             hash_name="sha256", password=self._password, salt=salt, iterations=self.ITERATIONS, dklen=self.KEY_SIZE + self.IV_SIZE
-         )
-         return key_iv[: self.KEY_SIZE], key_iv[self.KEY_SIZE :]
-
-     def encrypt_bytes(self, plaintext: bytes) -> bytes:
-         """Encrypt raw bytes and return encrypted bytes (OpenSSL compatible)."""
-         salt = secrets.token_bytes(self.SALT_SIZE)
-         key, iv = self._derive_key_iv(salt)
-
-         padder = padding.PKCS7(algorithms.AES.block_size).padder()
-         padded = padder.update(plaintext) + padder.finalize()
-
-         cipher = Cipher(algorithms.AES(key), modes.CBC(iv))
-         encryptor = cipher.encryptor()
-         ciphertext = encryptor.update(padded) + encryptor.finalize()
-
-         return self.MAGIC_HEADER + salt + ciphertext
-
-     def decrypt_bytes(self, encrypted: bytes) -> bytes:
-         """Decrypt raw encrypted bytes (as produced by encrypt_bytes)."""
-         if not encrypted.startswith(self.MAGIC_HEADER):
-             raise ValueError("Invalid format: missing OpenSSL salt header")
-
-         salt = encrypted[self.HEADER_LEN : self.HEADER_LEN + self.SALT_SIZE]
-         ciphertext = encrypted[self.HEADER_LEN + self.SALT_SIZE :]
-
-         key, iv = self._derive_key_iv(salt)
-         cipher = Cipher(algorithms.AES(key), modes.CBC(iv))
-         decryptor = cipher.decryptor()
-
-         try:
-             padded = decryptor.update(ciphertext) + decryptor.finalize()
-         except ValueError as exc:
-             raise ValueError("Decryption failed: wrong password or corrupted data") from exc
-
-         unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder()
-         try:
-             data = unpadder.update(padded) + unpadder.finalize()
-         except ValueError as exc:
-             raise ValueError("Decryption failed: wrong password or corrupted data") from exc
-
-         return data
-
-     def encrypt_base64(self, plaintext: str) -> str:
-         """Encrypt a UTF-8 string and return Base64-encoded encrypted data."""
-         raw = self.encrypt_bytes(plaintext.encode("utf-8"))
-         return base64.b64encode(raw).decode("ascii")
-
-     def decrypt_base64(self, b64_encoded: str) -> str:
-         """Decode Base64, decrypt bytes, and return UTF-8 string."""
-         try:
-             raw = base64.b64decode(b64_encoded.strip())
-         except Exception as exc:
-             raise ValueError("Invalid base64 format") from exc
-         plaintext_bytes = self.decrypt_bytes(raw)
-         return plaintext_bytes.decode("utf-8")
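The class docstring already shows the OpenSSL CLI equivalents; the sketch below adds the error-handling behaviour visible in `decrypt_bytes`. Import path assumed:

```python
from mm_std import OpensslAes256Cbc  # import path assumed

cipher = OpensslAes256Cbc(password="mypassword")
token = cipher.encrypt_base64("secret message")

# The token can be decrypted outside Python with the exact flags the class mirrors:
#   echo "<token>" | openssl enc -d -aes-256-cbc -pbkdf2 -iter 1000000 -base64 -pass pass:mypassword
print(cipher.decrypt_base64(token))  # secret message

# A wrong password raises ValueError rather than returning garbage, because
# PKCS7 unpadding almost always fails on a key derived from the wrong password.
try:
    OpensslAes256Cbc(password="wrong").decrypt_base64(token)
except ValueError as e:
    print(e)  # Decryption failed: wrong password or corrupted data
```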
mm_std/dict.py DELETED
@@ -1,49 +0,0 @@
- from collections import defaultdict
- from collections.abc import Mapping, MutableMapping
- from typing import TypeVar, cast
-
- K = TypeVar("K")
- V = TypeVar("V")
- DictType = TypeVar("DictType", bound=MutableMapping[K, V])  # type: ignore[valid-type]
-
-
- def replace_empty_dict_entries(
-     data: DictType,
-     defaults: Mapping[K, V] | None = None,
-     zero_is_empty: bool = False,
-     false_is_empty: bool = False,
-     empty_string_is_empty: bool = True,
- ) -> DictType:
-     """
-     Replace empty entries in a dictionary with provided default values,
-     or remove them if no default is available. Returns the same type as the input dictionary.
-     """
-     if defaults is None:
-         defaults = {}
-
-     try:
-         if isinstance(data, defaultdict):
-             result: MutableMapping[K, V] = defaultdict(data.default_factory)
-         else:
-             result = data.__class__()
-     except Exception:
-         result = {}
-
-     for key, value in data.items():
-         should_replace = (
-             value is None
-             or (empty_string_is_empty and value == "")
-             or (zero_is_empty and value == 0)
-             or (false_is_empty and value is False)
-         )
-
-         if should_replace:
-             if key in defaults:
-                 new_value = defaults[key]
-             else:
-                 continue  # Skip the key if no default is available
-         else:
-             new_value = value
-
-         result[key] = new_value
-     return cast(DictType, result)
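A worked example of `replace_empty_dict_entries`, showing the three possible outcomes for a key (kept, replaced from defaults, dropped). Import path assumed:

```python
from mm_std import replace_empty_dict_entries  # import path assumed

raw = {"host": "", "port": 0, "debug": None, "name": "svc"}
defaults = {"host": "localhost", "debug": False}

cleaned = replace_empty_dict_entries(raw, defaults=defaults, zero_is_empty=True)
# "" and None are replaced from defaults; "port" is empty under
# zero_is_empty but has no default, so it is dropped entirely.
print(cleaned)  # {"host": "localhost", "debug": False, "name": "svc"}
```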
mm_std/env.py DELETED
@@ -1,9 +0,0 @@
- import os
-
- from dotenv import load_dotenv
-
- load_dotenv()
-
-
- def get_dotenv(key: str) -> str | None:
-     return os.getenv(key)
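`get_dotenv` is a one-line wrapper, but note the side effect: `load_dotenv()` runs at module import time. A sketch, import path assumed:

```python
from mm_std import get_dotenv  # import path assumed; importing this module loads .env

# Values from a local .env file are now visible alongside real environment variables.
database_url = get_dotenv("DATABASE_URL")  # None if unset in both places
```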
mm_std/fs.py DELETED
@@ -1,13 +0,0 @@
- from pathlib import Path
-
-
- def read_text(path: str | Path) -> str:
-     if isinstance(path, str):
-         path = Path(path)
-     return path.read_text()
-
-
- def get_filename_without_extension(path: str | Path) -> str:
-     if isinstance(path, str):
-         path = Path(path)
-     return path.stem
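Two small path helpers; note that `Path.stem` strips only the last suffix. A sketch, import path assumed:

```python
from pathlib import Path

from mm_std import get_filename_without_extension, read_text  # import path assumed

print(get_filename_without_extension("reports/2024-q1.tar.gz"))  # "2024-q1.tar", not "2024-q1"
print(read_text(Path("README.md")))  # str and Path are both accepted
```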
mm_std/http/__init__.py DELETED
File without changes
@@ -1,126 +0,0 @@
- import aiohttp
- from aiohttp import ClientHttpProxyError, InvalidUrlClientError
- from aiohttp.typedefs import LooseCookies, Query
- from aiohttp_socks import ProxyConnectionError, ProxyConnector
- from multidict import CIMultiDictProxy
-
- from mm_std.http.http_response import HttpError, HttpResponse
-
-
- async def http_request(
-     url: str,
-     *,
-     method: str = "GET",
-     params: Query | None = None,
-     data: dict[str, object] | None = None,
-     json: dict[str, object] | None = None,
-     headers: dict[str, str] | None = None,
-     cookies: LooseCookies | None = None,
-     user_agent: str | None = None,
-     proxy: str | None = None,
-     timeout: float | None = 10.0,
- ) -> HttpResponse:
-     """
-     Send an HTTP request and return the response.
-     """
-     timeout_ = aiohttp.ClientTimeout(total=timeout) if timeout else None
-     if user_agent:
-         if not headers:
-             headers = {}
-         headers["user-agent"] = user_agent
-
-     try:
-         if proxy and proxy.startswith("socks"):
-             return await _request_with_socks_proxy(
-                 url,
-                 method=method,
-                 params=params,
-                 data=data,
-                 json=json,
-                 headers=headers,
-                 cookies=cookies,
-                 proxy=proxy,
-                 timeout=timeout_,
-             )
-         return await _request_with_http_or_none_proxy(
-             url,
-             method=method,
-             params=params,
-             data=data,
-             json=json,
-             headers=headers,
-             cookies=cookies,
-             proxy=proxy,
-             timeout=timeout_,
-         )
-     except TimeoutError as err:
-         return HttpResponse(error=HttpError.TIMEOUT, error_message=str(err))
-     except (aiohttp.ClientProxyConnectionError, ProxyConnectionError, ClientHttpProxyError) as err:
-         return HttpResponse(error=HttpError.PROXY, error_message=str(err))
-     except InvalidUrlClientError as e:
-         return HttpResponse(error=HttpError.INVALID_URL, error_message=str(e))
-     except Exception as err:
-         return HttpResponse(error=HttpError.ERROR, error_message=str(err))
-
-
- async def _request_with_http_or_none_proxy(
-     url: str,
-     *,
-     method: str = "GET",
-     params: Query | None = None,
-     data: dict[str, object] | None = None,
-     json: dict[str, object] | None = None,
-     headers: dict[str, str] | None = None,
-     cookies: LooseCookies | None = None,
-     proxy: str | None = None,
-     timeout: aiohttp.ClientTimeout | None,
- ) -> HttpResponse:
-     async with aiohttp.request(
-         method, url, params=params, data=data, json=json, headers=headers, cookies=cookies, proxy=proxy, timeout=timeout
-     ) as res:
-         return HttpResponse(
-             status_code=res.status,
-             error=None,
-             error_message=None,
-             body=(await res.read()).decode(),
-             headers=headers_dict(res.headers),
-         )
-
-
- async def _request_with_socks_proxy(
-     url: str,
-     *,
-     method: str = "GET",
-     proxy: str,
-     params: Query | None = None,
-     data: dict[str, object] | None = None,
-     json: dict[str, object] | None = None,
-     headers: dict[str, str] | None = None,
-     cookies: LooseCookies | None = None,
-     timeout: aiohttp.ClientTimeout | None,
- ) -> HttpResponse:
-     connector = ProxyConnector.from_url(proxy)
-     async with (
-         aiohttp.ClientSession(connector=connector) as session,
-         session.request(
-             method, url, params=params, data=data, json=json, headers=headers, cookies=cookies, timeout=timeout
-         ) as res,
-     ):
-         return HttpResponse(
-             status_code=res.status,
-             error=None,
-             error_message=None,
-             body=(await res.read()).decode(),
-             headers=headers_dict(res.headers),
-         )
-
-
- def headers_dict(headers: CIMultiDictProxy[str]) -> dict[str, str]:
-     result: dict[str, str] = {}
-     for key in headers:
-         values = headers.getall(key)
-         if len(values) == 1:
-             result[key] = values[0]
-         else:
-             result[key] = ", ".join(values)
-     return result
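Finally, the aiohttp-based `http_request` helper, which never raises and instead folds failures into `HttpResponse.error`. The module path in the sketch is a guess (this file's header is missing from the diff), though `mm_std.http.http_response` is confirmed by the imports above:

```python
import asyncio

# Module path assumed: the diff omits this file's name, but its own imports
# reference mm_std.http.http_response, so a sibling module name is guessed.
from mm_std.http.http_request import http_request


async def main() -> None:
    res = await http_request(
        "https://httpbin.org/get",
        params={"q": "demo"},
        user_agent="mm-std-example/0.1",
        timeout=5.0,
        # proxy="socks5://127.0.0.1:9050",  # socks* URLs are routed through aiohttp_socks
    )
    if res.error is not None:
        # One of HttpError.TIMEOUT / PROXY / INVALID_URL / ERROR, per the handlers above
        print("request failed:", res.error, res.error_message)
    else:
        print(res.status_code)
        print(res.body[:200])


asyncio.run(main())
```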