mm-std 0.3.16__py3-none-any.whl → 0.3.18__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mm_std/__init__.py +3 -2
- mm_std/concurrency/async_decorators.py +38 -3
- mm_std/concurrency/async_scheduler.py +105 -45
- mm_std/concurrency/async_task_runner.py +26 -25
- mm_std/http_.py +57 -33
- {mm_std-0.3.16.dist-info → mm_std-0.3.18.dist-info}/METADATA +3 -3
- {mm_std-0.3.16.dist-info → mm_std-0.3.18.dist-info}/RECORD +8 -8
- {mm_std-0.3.16.dist-info → mm_std-0.3.18.dist-info}/WHEEL +0 -0
mm_std/__init__.py
CHANGED
@@ -2,6 +2,7 @@ from .command import CommandResult as CommandResult
 from .command import run_command as run_command
 from .command import run_ssh_command as run_ssh_command
 from .concurrency.async_decorators import async_synchronized as async_synchronized
+from .concurrency.async_decorators import async_synchronized_parameter as async_synchronized_parameter
 from .concurrency.async_scheduler import AsyncScheduler as AsyncScheduler
 from .concurrency.async_task_runner import AsyncTaskRunner as AsyncTaskRunner
 from .concurrency.sync_decorators import synchronized as synchronized
@@ -22,10 +23,10 @@ from .http_ import CHROME_USER_AGENT as CHROME_USER_AGENT
 from .http_ import FIREFOX_USER_AGENT as FIREFOX_USER_AGENT
 from .http_ import HResponse as HResponse
 from .http_ import add_query_params_to_url as add_query_params_to_url
-from .http_ import ahr as ahr
-from .http_ import async_hrequest as async_hrequest
 from .http_ import hr as hr
+from .http_ import hra as hra
 from .http_ import hrequest as hrequest
+from .http_ import hrequest_async as hrequest_async
 from .json_ import CustomJSONEncoder as CustomJSONEncoder
 from .json_ import json_dumps as json_dumps
 from .log import init_logger as init_logger
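The net effect of this hunk is a rename of the async HTTP exports: ahr and async_hrequest are removed, hra and hrequest_async are added. A minimal sketch of the updated import, assuming only the names shown above:

from mm_std import hrequest_async, hra

# hra is the short alias for hrequest_async, mirroring hr = hrequest
assert hra is hrequest_async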
mm_std/concurrency/async_decorators.py
CHANGED
@@ -1,15 +1,15 @@
+import asyncio
 import functools
+from collections import defaultdict
 from collections.abc import Awaitable, Callable
 from typing import ParamSpec, TypeVar
 
-import anyio
-
 P = ParamSpec("P")
 R = TypeVar("R")
 
 
 def async_synchronized(func: Callable[P, Awaitable[R]]) -> Callable[P, Awaitable[R]]:
-    lock =
+    lock = asyncio.Lock()
 
     @functools.wraps(func)
     async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
@@ -17,3 +17,38 @@ def async_synchronized(func: Callable[P, Awaitable[R]]) -> Callable[P, Awaitable
         return await func(*args, **kwargs)
 
     return wrapper
+
+
+T = TypeVar("T")
+
+
+def async_synchronized_parameter[T, **P](
+    arg_index: int = 0, skip_if_locked: bool = False
+) -> Callable[[Callable[P, Awaitable[T]]], Callable[P, Awaitable[T | None]]]:
+    locks: dict[object, asyncio.Lock] = defaultdict(asyncio.Lock)
+
+    def outer(func: Callable[P, Awaitable[T]]) -> Callable[P, Awaitable[T | None]]:
+        @functools.wraps(func)
+        async def wrapper(*args: P.args, **kwargs: P.kwargs) -> T | None:
+            if len(args) <= arg_index:
+                raise ValueError(f"Function called with fewer than {arg_index + 1} positional arguments")
+
+            key = args[arg_index]
+
+            if skip_if_locked and locks[key].locked():
+                return None
+
+            try:
+                async with locks[key]:
+                    return await func(*args, **kwargs)
+            finally:
+                # Clean up the lock if no one is waiting
+                # TODO: I'm not sure if the next like is OK
+                if not locks[key].locked() and not locks[key]._waiters:  # noqa: SLF001
+                    locks.pop(key, None)
+
+        # Store locks for potential external access
+        wrapper.locks = locks  # type: ignore[attr-defined]
+        return wrapper
+
+    return outer
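A minimal usage sketch of the new async_synchronized_parameter decorator, based only on the code above; the fetch_balance coroutine and its account_id argument are hypothetical:

import asyncio

from mm_std import async_synchronized_parameter


@async_synchronized_parameter(arg_index=0, skip_if_locked=True)
async def fetch_balance(account_id: str) -> str:
    # Calls are serialized per account_id; with skip_if_locked=True a concurrent
    # call for the same key returns None instead of waiting.
    await asyncio.sleep(0.1)
    return f"balance for {account_id}"


async def main() -> None:
    results = await asyncio.gather(fetch_balance("acc-1"), fetch_balance("acc-1"))
    print(results)  # one real result, one None (second call skipped while locked)


asyncio.run(main())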
mm_std/concurrency/async_scheduler.py
CHANGED
@@ -1,19 +1,29 @@
 import asyncio
-import
+from collections.abc import Awaitable, Callable
 from dataclasses import dataclass, field
 from datetime import datetime
 from logging import Logger
 from typing import Any
 
-import anyio
-
 from mm_std.date import utc_now
-
+
+type AsyncFunc = Callable[..., Awaitable[object]]
+type Args = tuple[object, ...]
+type Kwargs = dict[str, object]
 
 
 class AsyncScheduler:
+    """
+    A scheduler for running async tasks at fixed intervals.
+
+    Each task runs on its own schedule and waits for the specified interval
+    between executions.
+    """
+
     @dataclass
     class TaskInfo:
+        """Information about a scheduled task."""
+
         task_id: str
         interval: float
         func: AsyncFunc
@@ -24,16 +34,29 @@ class AsyncScheduler:
         last_run: datetime | None = None
         running: bool = False
 
-    def __init__(self, logger: Logger) -> None:
+    def __init__(self, logger: Logger, name: str = "AsyncScheduler") -> None:
+        """Initialize the async scheduler."""
         self.tasks: dict[str, AsyncScheduler.TaskInfo] = {}
         self._running: bool = False
-        self.
+        self._tasks: list[asyncio.Task[Any]] = []
         self._main_task: asyncio.Task[Any] | None = None
-        self._thread: threading.Thread | None = None
         self._logger = logger
+        self._name = name
 
     def add_task(self, task_id: str, interval: float, func: AsyncFunc, args: Args = (), kwargs: Kwargs | None = None) -> None:
-        """
+        """
+        Register a new task with the scheduler.
+
+        Args:
+            task_id: Unique identifier for the task
+            interval: Time in seconds between task executions
+            func: Async function to execute
+            args: Positional arguments to pass to the function
+            kwargs: Keyword arguments to pass to the function
+
+        Raises:
+            ValueError: If a task with the same ID already exists
+        """
         if kwargs is None:
             kwargs = {}
         if task_id in self.tasks:
@@ -41,70 +64,107 @@ class AsyncScheduler:
         self.tasks[task_id] = AsyncScheduler.TaskInfo(task_id=task_id, interval=interval, func=func, args=args, kwargs=kwargs)
 
     async def _run_task(self, task_id: str) -> None:
-        """
+        """
+        Internal loop for running a single task repeatedly.
+
+        Args:
+            task_id: ID of the task to run
+        """
         task = self.tasks[task_id]
-
-
-
-
-
-
-            task.error_count += 1
-            self._logger.exception("AsyncScheduler exception")
-
-        # Calculate elapsed time and sleep if needed so that tasks never overlap.
-        elapsed = (utc_now() - task.last_run).total_seconds()
-        sleep_time = task.interval - elapsed
-        if sleep_time > 0:
+        task.running = True
+
+        try:
+            while self._running:
+                task.last_run = utc_now()
+                task.run_count += 1
                 try:
-                    await
+                    await task.func(*task.args, **task.kwargs)
                 except Exception:
-
+                    task.error_count += 1
+                    self._logger.exception(f"Exception in task {task_id}")
+
+                # Calculate elapsed time and sleep if needed
+                elapsed = (utc_now() - task.last_run).total_seconds()
+                sleep_time = max(0, task.interval - elapsed)
+                if sleep_time > 0:
+                    try:
+                        await asyncio.sleep(sleep_time)
+                    except asyncio.CancelledError:
+                        break
+        finally:
+            task.running = False
+            self._logger.debug(f"Task {task_id} stopped")
 
     async def _start_all_tasks(self) -> None:
-        """Starts all tasks concurrently
-
-
-
-
-
-
-
-
-
+        """Starts all tasks concurrently using asyncio tasks."""
+        self._tasks = []
+
+        for task_id in self.tasks:
+            task = asyncio.create_task(self._run_task(task_id), name=self._name + "-" + task_id)
+            self._tasks.append(task)
+
+        try:
+            # Keep the main task alive while the scheduler is running
+            while self._running:  # noqa: ASYNC110
+                await asyncio.sleep(0.1)
+        except asyncio.CancelledError:
+            self._logger.debug("Main scheduler task cancelled")
+        finally:
+            # Cancel all running tasks when we exit
+            for task in self._tasks:
+                if not task.done():
+                    task.cancel()
+
+            # Wait for all tasks to finish
+            if self._tasks:
+                await asyncio.gather(*self._tasks, return_exceptions=True)
+            self._tasks = []
 
     def start(self) -> None:
         """
         Start the scheduler.
 
-
-        which runs an AnyIO event loop.
+        Creates tasks in the current event loop for each registered task.
         """
         if self._running:
             self._logger.warning("AsyncScheduler already running")
             return
+
         self._running = True
         self._logger.debug("Starting AsyncScheduler")
-
-        # Create a task in the current event loop
         self._main_task = asyncio.create_task(self._start_all_tasks())
 
     def stop(self) -> None:
         """
         Stop the scheduler.
 
-
-        This method then waits for the background thread to finish.
+        Cancels all running tasks and waits for them to complete.
         """
         if not self._running:
             self._logger.warning("AsyncScheduler not running")
             return
+
         self._logger.debug("Stopping AsyncScheduler")
         self._running = False
-        if self._cancel_scope is not None:
-            self._cancel_scope.cancel()
 
-        if self.
-            self.
-
+        if self._main_task and not self._main_task.done():
+            self._main_task.cancel()
+
         self._logger.debug("AsyncScheduler stopped")
+
+    def is_running(self) -> bool:
+        """
+        Check if the scheduler is currently running.
+
+        Returns:
+            True if the scheduler is running, False otherwise
+        """
+        return self._running
+
+    def clear_tasks(self) -> None:
+        """Clear all tasks from the scheduler."""
+        if self._running:
+            self._logger.warning("Cannot clear tasks while scheduler is running")
+            return
+        self.tasks.clear()
+        self._logger.debug("Cleared all tasks from the scheduler")
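A minimal usage sketch of the reworked, asyncio-based scheduler, using only the constructor, add_task, start and stop shown above; the heartbeat coroutine and logger name are hypothetical:

import asyncio
import logging

from mm_std import AsyncScheduler


async def heartbeat() -> None:
    print("tick")


async def main() -> None:
    scheduler = AsyncScheduler(logging.getLogger("demo"), name="demo-scheduler")
    scheduler.add_task("heartbeat", interval=1.0, func=heartbeat)
    scheduler.start()  # creates asyncio tasks in the current event loop
    await asyncio.sleep(3)
    scheduler.stop()   # cancels the main task, which cancels the per-task loops
    await asyncio.sleep(0.2)  # give cancelled tasks a moment to wind down


asyncio.run(main())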
mm_std/concurrency/async_task_runner.py
CHANGED
@@ -1,11 +1,10 @@
 from __future__ import annotations
 
-
+import asyncio
+from collections.abc import Awaitable
 from dataclasses import dataclass
 from typing import Any
 
-import anyio
-
 
 class AsyncTaskRunner:
     """
@@ -25,20 +24,20 @@ class AsyncTaskRunner:
         """Individual task representation"""
 
         task_id: str
-
-        args: tuple[Any, ...]
-        kwargs: dict[str, Any]
+        awaitable: Awaitable[Any]
 
-    def __init__(self, max_concurrent_tasks: int, timeout: float | None = None) -> None:
+    def __init__(self, max_concurrent_tasks: int, timeout: float | None = None, name: str | None = None) -> None:
         """
         :param max_concurrent_tasks: Maximum number of tasks that can run concurrently.
         :param timeout: Optional overall timeout in seconds for running all tasks.
+        :param name: Optional name for the runner.
         """
         if timeout is not None and timeout <= 0:
             raise ValueError("Timeout must be positive if specified.")
         self.max_concurrent_tasks: int = max_concurrent_tasks
         self.timeout: float | None = timeout
-        self.
+        self.name = name
+        self.semaphore: asyncio.Semaphore = asyncio.Semaphore(max_concurrent_tasks)
         self._tasks: list[AsyncTaskRunner.Task] = []
         self._was_run: bool = False
         self._task_ids: set[str] = set()
@@ -46,17 +45,13 @@ class AsyncTaskRunner:
     def add_task(
         self,
         task_id: str,
-
-        *args: object,
-        **kwargs: object,
+        awaitable: Awaitable[Any],
     ) -> None:
         """
         Adds a task to the runner that will be executed when run() is called.
 
         :param task_id: Unique identifier for the task.
-        :param
-        :param args: Positional arguments for async_func.
-        :param kwargs: Keyword arguments for async_func.
+        :param awaitable: The awaitable (coroutine) to execute.
         :raises RuntimeError: If the runner has already been used.
         :raises ValueError: If task_id is empty or already exists.
         """
@@ -70,11 +65,11 @@ class AsyncTaskRunner:
             raise ValueError(f"Task ID '{task_id}' already exists. All task IDs must be unique.")
 
         self._task_ids.add(task_id)
-        self._tasks.append(AsyncTaskRunner.Task(task_id,
+        self._tasks.append(AsyncTaskRunner.Task(task_id, awaitable))
 
     async def run(self) -> AsyncTaskRunner.Result:
         """
-        Executes all added tasks with concurrency limited by the
+        Executes all added tasks with concurrency limited by the semaphore.
         If a timeout is specified, non-finished tasks are cancelled.
 
        :return: AsyncTaskRunner.Result containing task results, exceptions, and flags indicating overall status.
@@ -89,24 +84,30 @@ class AsyncTaskRunner:
         is_timeout: bool = False
 
         async def run_task(task: AsyncTaskRunner.Task) -> None:
-            async with self.
+            async with self.semaphore:
                 try:
-                    res: Any = await task.
+                    res: Any = await task.awaitable
                     results[task.task_id] = res
                 except Exception as e:
                     exceptions[task.task_id] = e
 
+        # Create asyncio tasks for all runner tasks
+        tasks = [asyncio.create_task(run_task(task)) for task in self._tasks]
+
         try:
             if self.timeout is not None:
-                with
-
-                for task in self._tasks:
-                    tg.start_soon(run_task, task)
+                # Run with timeout
+                await asyncio.wait_for(asyncio.gather(*tasks), timeout=self.timeout)
             else:
-
-
-                tg.start_soon(run_task, task)
+                # Run without timeout
+                await asyncio.gather(*tasks)
         except TimeoutError:
+            # Cancel all running tasks on timeout
+            for task in tasks:
+                if not task.done():
+                    task.cancel()
+            # Wait for tasks to complete cancellation
+            await asyncio.gather(*tasks, return_exceptions=True)
             is_timeout = True
 
         is_ok: bool = (not exceptions) and (not is_timeout)
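A minimal sketch of the new awaitable-based AsyncTaskRunner API shown above (earlier versions took a function plus args/kwargs); the fetch coroutine and task IDs are hypothetical:

import asyncio

from mm_std import AsyncTaskRunner


async def fetch(n: int) -> int:
    await asyncio.sleep(0.1)
    return n * 2


async def main() -> None:
    runner = AsyncTaskRunner(max_concurrent_tasks=5, timeout=10, name="demo")
    runner.add_task("task-1", fetch(1))  # pass a ready-made awaitable
    runner.add_task("task-2", fetch(2))
    result = await runner.run()
    # Result carries per-task results and exceptions keyed by task_id plus
    # overall status flags, as described in the run() docstring above.
    print(result)


asyncio.run(main())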
mm_std/http_.py
CHANGED
@@ -1,12 +1,14 @@
+import asyncio
 import json
 from dataclasses import asdict, dataclass, field
 from typing import Any
 from urllib.parse import urlencode
 
-import
-import httpx
+import aiohttp
 import pydash
 import requests
+import rich
+from aiohttp_socks import ProxyConnector
 from requests.auth import AuthBase
 
 from mm_std.result import Err, Ok, Result
@@ -142,7 +144,7 @@ def hrequest(
         return HResponse(error=f"exception: {err}")
 
 
-async def
+async def hrequest_async(
     url: str,
     *,
     method: str = "GET",
@@ -150,16 +152,17 @@ async def async_hrequest(
     params: dict[str, Any] | None = None,
     headers: dict[str, Any] | None = None,
     cookies: dict[str, Any] | None = None,
-    timeout: float = 10,
+    timeout: float = 10,
     user_agent: str | None = None,
     json_params: bool = True,
-    auth:
+    auth: tuple[str, str] | None = None,
     verify: bool = True,
 ) -> HResponse:
     query_params: dict[str, Any] | None = None
     data: dict[str, Any] | None = None
     json_: dict[str, Any] | None = None
     method = method.upper()
+
     if not headers:
         headers = {}
     if user_agent:
@@ -171,33 +174,54 @@ async def async_hrequest(
     else:
         data = params
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    try:
+        # Configure connector based on proxy type
+        if proxy:
+            # HTTP proxy will be handled in request kwargs
+            connector = ProxyConnector.from_url(proxy) if proxy.startswith("socks5://") else aiohttp.TCPConnector(ssl=verify)
+        else:
+            connector = aiohttp.TCPConnector(ssl=verify)
+
+        timeout_obj = aiohttp.ClientTimeout(total=timeout)
+
+        async with aiohttp.ClientSession(connector=connector, timeout=timeout_obj, cookies=cookies) as session:
+            request_kwargs: dict[str, Any] = {"headers": headers}
+
+            if query_params:
+                request_kwargs["params"] = query_params
+            if json_:
+                request_kwargs["json"] = json_
+            if data:
+                request_kwargs["data"] = data
+
+            if auth and isinstance(auth, tuple) and len(auth) == 2:
+                request_kwargs["auth"] = aiohttp.BasicAuth(auth[0], auth[1])
+
+            # Set HTTP proxy (not needed for SOCKS5)
+            if proxy and not proxy.startswith("socks5://"):
+                request_kwargs["proxy"] = proxy
+
+            try:
+                async with await asyncio.wait_for(session.request(method, url, **request_kwargs), timeout=timeout) as response:
+                    body = await response.text()
+                    return HResponse(code=response.status, body=body, headers=dict(response.headers))
+            except TimeoutError:
+                return HResponse(error="timeout")
+            except (aiohttp.ClientProxyConnectionError, aiohttp.ClientHttpProxyError):
+                return HResponse(error="proxy_error")
+            except aiohttp.ClientConnectorError as err:
+                return HResponse(error=f"connection_error: {err}")
+            except aiohttp.ClientError as err:
+                rich.inspect(err)
+                return HResponse(error=f"connection_error: {err}")
+            except Exception as err:
+                if "couldn't connect to proxy" in str(err).lower():
+                    return HResponse(error="proxy_error")
+                return HResponse(error=f"exception: {err}")
+    except TimeoutError:
+        return HResponse(error="timeout")
+    except Exception as err:
+        return HResponse(error=f"exception: {err}")
 
 
 def add_query_params_to_url(url: str, params: dict[str, object]) -> str:
@@ -208,4 +232,4 @@ def add_query_params_to_url(url: str, params: dict[str, object]) -> str:
 
 
 hr = hrequest
-
+hra = hrequest_async
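A minimal sketch of calling the new aiohttp-backed helper, using only the parameters visible in the signature above; the URL is a placeholder:

import asyncio

from mm_std import hrequest_async  # also exported as hra


async def main() -> None:
    res = await hrequest_async("https://example.com", method="GET", timeout=5)
    if res.error:
        print("request failed:", res.error)
    else:
        print(res.code, len(res.body))


asyncio.run(main())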
{mm_std-0.3.16.dist-info → mm_std-0.3.18.dist-info}/METADATA
CHANGED
@@ -1,10 +1,10 @@
 Metadata-Version: 2.4
 Name: mm-std
-Version: 0.3.16
+Version: 0.3.18
 Requires-Python: >=3.12
-Requires-Dist:
+Requires-Dist: aiohttp-socks~=0.10.1
+Requires-Dist: aiohttp~=3.11.14
 Requires-Dist: cryptography~=44.0.2
-Requires-Dist: httpx[socks]>=0.28.1
 Requires-Dist: pydantic-settings>=2.8.1
 Requires-Dist: pydantic~=2.10.6
 Requires-Dist: pydash~=8.0.5
{mm_std-0.3.16.dist-info → mm_std-0.3.18.dist-info}/RECORD
CHANGED
@@ -1,4 +1,4 @@
-mm_std/__init__.py,sha256=
+mm_std/__init__.py,sha256=yxfH7ROIU5jwRQEeFIloTlDPZm_8443PKk50xzmQq7c,2984
 mm_std/command.py,sha256=ze286wjUjg0QSTgIu-2WZks53_Vclg69UaYYgPpQvCU,1283
 mm_std/config.py,sha256=4ox4D2CgGR76bvZ2n2vGQOYUDagFnlKEDb87to5zpxE,1871
 mm_std/crypto.py,sha256=jdk0_TCmeU0pPXMyz9xH6kQHSjjZ9GcGClBwQps5vBo,340
@@ -6,7 +6,7 @@ mm_std/date.py,sha256=976eEkSONuNqHQBgSRu8hrtH23tJqztbmHFHLdbP2TY,1879
 mm_std/dict.py,sha256=6GkhJPXD0LiJDxPcYe6jPdEDw-MN7P7mKu6U5XxwYDk,675
 mm_std/env.py,sha256=5zaR9VeIfObN-4yfgxoFeU5IM1GDeZZj9SuYf7t9sOA,125
 mm_std/fs.py,sha256=RwarNRJq3tIMG6LVX_g03hasfYpjYFh_O27oVDt5IPQ,291
-mm_std/http_.py,sha256=
+mm_std/http_.py,sha256=cozBUGZcbKp9sZuEnu7bklwa6lTE0RxEUVo_aNt1_kE,7468
 mm_std/json_.py,sha256=Naa6mBE4D0yiQGkPNRrFvndnUH3R7ovw3FeaejWV60o,1196
 mm_std/log.py,sha256=6ux6njNKc_ZCQlvWn1FZR6vcSY2Cem-mQzmNXvsg5IE,913
 mm_std/net.py,sha256=qdRCBIDneip6FaPNe5mx31UtYVmzqam_AoUF7ydEyjA,590
@@ -19,12 +19,12 @@ mm_std/toml.py,sha256=CNznWKR0bpOxS6e3VB5LGS-Oa9lW-wterkcPUFtPcls,610
 mm_std/types_.py,sha256=9FGd2q47a8M9QQgsWJR1Kq34jLxBAkYSoJuwih4PPqg,257
 mm_std/zip.py,sha256=axzF1BwcIygtfNNTefZH7hXKaQqwe-ZH3ChuRWr9dnk,396
 mm_std/concurrency/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-mm_std/concurrency/async_decorators.py,sha256=
-mm_std/concurrency/async_scheduler.py,sha256
-mm_std/concurrency/async_task_runner.py,sha256=
+mm_std/concurrency/async_decorators.py,sha256=xEpyipzp3ZhaPHtdeTE-Ikrt67SUTFKBE6LQPeoeh6Q,1735
+mm_std/concurrency/async_scheduler.py,sha256=qS3QKMA0xpoxCZWjDW1ItAwKMTQ5h8esXMMRA0eXtxE,5644
+mm_std/concurrency/async_task_runner.py,sha256=zYC2Jv5taUh8dnyDfWwh394SkzTXtdE9hOhvjV2FWKc,4493
 mm_std/concurrency/sync_decorators.py,sha256=syCQBOmN7qPO55yzgJB2rbkh10CVww376hmyvs6e5tA,1080
 mm_std/concurrency/sync_scheduler.py,sha256=j4tBL_cBI1spr0cZplTA7N2CoYsznuORMeRN8rpR6gY,2407
 mm_std/concurrency/sync_task_runner.py,sha256=s5JPlLYLGQGHIxy4oDS-PN7O9gcy-yPZFoNm8RQwzcw,1780
-mm_std-0.3.
-mm_std-0.3.
-mm_std-0.3.
+mm_std-0.3.18.dist-info/METADATA,sha256=AUl95Lq0McMhB-E8PsxqPEbYsxuiQI0cA1t5AX208qg,415
+mm_std-0.3.18.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+mm_std-0.3.18.dist-info/RECORD,,

{mm_std-0.3.16.dist-info → mm_std-0.3.18.dist-info}/WHEEL
File without changes