mm-std 0.3.8__py3-none-any.whl → 0.3.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
mm_std/__init__.py
CHANGED
@@ -1,3 +1,4 @@
|
|
1
|
+
from .async_concurrency import AsyncScheduler as AsyncScheduler
|
1
2
|
from .command import CommandResult as CommandResult
|
2
3
|
from .command import run_command as run_command
|
3
4
|
from .command import run_ssh_command as run_ssh_command
|
@@ -19,6 +20,8 @@ from .http_ import CHROME_USER_AGENT as CHROME_USER_AGENT
|
|
19
20
|
from .http_ import FIREFOX_USER_AGENT as FIREFOX_USER_AGENT
|
20
21
|
from .http_ import HResponse as HResponse
|
21
22
|
from .http_ import add_query_params_to_url as add_query_params_to_url
|
23
|
+
from .http_ import ahr as ahr
|
24
|
+
from .http_ import async_hrequest as async_hrequest
|
22
25
|
from .http_ import hr as hr
|
23
26
|
from .http_ import hrequest as hrequest
|
24
27
|
from .json_ import CustomJSONEncoder as CustomJSONEncoder
|
@@ -0,0 +1,107 @@
|
|
1
|
+
import threading
|
2
|
+
from collections.abc import Awaitable, Callable
|
3
|
+
from dataclasses import dataclass, field
|
4
|
+
from datetime import UTC, datetime
|
5
|
+
from logging import Logger
|
6
|
+
|
7
|
+
import anyio
|
8
|
+
|
9
|
+
# Type aliases for clarity
|
10
|
+
AsyncFunc = Callable[..., Awaitable[object]]
|
11
|
+
Args = tuple[object, ...]
|
12
|
+
Kwargs = dict[str, object]
|
13
|
+
|
14
|
+
|
15
|
+
class AsyncScheduler:
    """Periodically runs registered async tasks on an AnyIO event loop in a daemon thread.

    Tasks are registered with ``add_task`` before ``start`` is called; each task runs
    in its own loop and never overlaps with itself (the next run is delayed until
    ``interval`` seconds after the previous run *started*).
    """

    @dataclass
    class TaskInfo:
        # Bookkeeping record for one scheduled task.
        task_id: str
        interval: float  # seconds between run starts; runs of one task never overlap
        func: AsyncFunc
        args: Args = ()
        kwargs: Kwargs = field(default_factory=dict)
        run_count: int = 0  # total completed (or attempted) runs
        last_run: datetime | None = None  # UTC timestamp of the most recent run start
        running: bool = False

    def __init__(self, logger: Logger) -> None:
        self.tasks: dict[str, AsyncScheduler.TaskInfo] = {}
        self._running: bool = False
        self._cancel_scope: anyio.CancelScope | None = None
        self._thread: threading.Thread | None = None
        self._logger = logger

    def add_task(self, task_id: str, interval: float, func: AsyncFunc, args: Args = (), kwargs: Kwargs | None = None) -> None:
        """Register a new task with the scheduler.

        Raises:
            ValueError: if a task with the same ``task_id`` is already registered.
        """
        if kwargs is None:
            kwargs = {}
        if task_id in self.tasks:
            raise ValueError(f"Task with id {task_id} already exists")
        self.tasks[task_id] = AsyncScheduler.TaskInfo(task_id=task_id, interval=interval, func=func, args=args, kwargs=kwargs)

    async def _run_task(self, task_id: str) -> None:
        """Internal loop for running a single task repeatedly."""
        task = self.tasks[task_id]
        while self._running:
            task.last_run = datetime.now(UTC)
            task.run_count += 1
            try:
                await task.func(*task.args, **task.kwargs)
            except Exception:
                # A failing task must not kill the scheduler; log and keep looping.
                self._logger.exception("AsyncScheduler exception")

            # Calculate elapsed time and sleep if needed so that tasks never overlap.
            elapsed = (datetime.now(UTC) - task.last_run).total_seconds()
            sleep_time = task.interval - elapsed
            if sleep_time > 0:
                try:
                    await anyio.sleep(sleep_time)
                except Exception:
                    self._logger.exception("AsyncScheduler exception")

    async def _start_all_tasks(self) -> None:
        """Starts all tasks concurrently in an AnyIO task group."""
        async with anyio.create_task_group() as tg:
            self._cancel_scope = tg.cancel_scope
            for task_id in self.tasks:
                tg.start_soon(self._run_task, task_id)
            try:
                # Keep the task group alive until stop() flips the flag or cancels us.
                while self._running:  # noqa: ASYNC110
                    await anyio.sleep(0.1)
            except anyio.get_cancelled_exc_class():
                self._logger.info("Task group cancelled. Exiting _start_all_tasks.")
                # BUG FIX: cancellation exceptions must propagate in AnyIO so the
                # enclosing cancel scope can unwind correctly; swallowing them here
                # breaks the task group's cancellation machinery.
                raise

    def start(self) -> None:
        """
        Start the scheduler.

        This method launches the scheduler in a background thread,
        which runs an AnyIO event loop. Calling it while already
        running is a no-op (a warning is logged).
        """
        if self._running:
            self._logger.warning("Scheduler already running")
            return
        self._running = True
        self._logger.info("Starting scheduler")
        # Daemon thread so a forgotten scheduler never blocks interpreter exit.
        self._thread = threading.Thread(target=lambda: anyio.run(self._start_all_tasks), daemon=True)
        self._thread.start()

    def stop(self) -> None:
        """
        Stop the scheduler.

        The running flag is set to False so that each task's loop will exit.
        This method then waits (up to 5 seconds) for the background thread to finish.
        """
        if not self._running:
            self._logger.warning("Scheduler not running")
            return
        self._logger.info("Stopping scheduler")
        self._running = False
        if self._cancel_scope is not None:
            self._cancel_scope.cancel()

        if self._thread:
            self._thread.join(timeout=5)
            self._thread = None
        self._logger.info("Scheduler stopped")
|
mm_std/config.py
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
import sys
|
2
2
|
import tomllib
|
3
3
|
from pathlib import Path
|
4
|
-
from typing import NoReturn
|
4
|
+
from typing import NoReturn, Self
|
5
5
|
|
6
6
|
from pydantic import BaseModel, ConfigDict, ValidationError
|
7
7
|
|
@@ -35,7 +35,7 @@ class BaseConfig(BaseModel):
|
|
35
35
|
sys.exit(1)
|
36
36
|
|
37
37
|
@classmethod
|
38
|
-
def read_toml_config
|
38
|
+
def read_toml_config(cls, config_path: Path, zip_password: str = "") -> Result[Self]: # nosec
|
39
39
|
try:
|
40
40
|
config_path = config_path.expanduser()
|
41
41
|
if config_path.name.endswith(".zip"):
|
mm_std/http_.py
CHANGED
@@ -5,7 +5,8 @@ from urllib.parse import urlencode
|
|
5
5
|
|
6
6
|
import httpx
|
7
7
|
import pydash
|
8
|
-
|
8
|
+
import requests
|
9
|
+
from requests.auth import AuthBase
|
9
10
|
|
10
11
|
from mm_std.result import Err, Ok, Result
|
11
12
|
|
@@ -90,7 +91,7 @@ def hrequest(
|
|
90
91
|
timeout: float = 10,
|
91
92
|
user_agent: str | None = None,
|
92
93
|
json_params: bool = True,
|
93
|
-
auth:
|
94
|
+
auth: AuthBase | tuple[str, str] | None = None,
|
94
95
|
verify: bool = True,
|
95
96
|
) -> HResponse:
|
96
97
|
query_params: dict[str, Any] | None = None
|
@@ -108,11 +109,18 @@ def hrequest(
|
|
108
109
|
else:
|
109
110
|
data = params
|
110
111
|
|
112
|
+
proxies = None
|
113
|
+
if proxy:
|
114
|
+
proxies = {
|
115
|
+
"http": proxy,
|
116
|
+
"https": proxy,
|
117
|
+
}
|
118
|
+
|
111
119
|
try:
|
112
|
-
r =
|
120
|
+
r = requests.request(
|
113
121
|
method,
|
114
122
|
url,
|
115
|
-
|
123
|
+
proxies=proxies,
|
116
124
|
timeout=timeout,
|
117
125
|
cookies=cookies,
|
118
126
|
auth=auth,
|
@@ -123,11 +131,67 @@ def hrequest(
|
|
123
131
|
data=data,
|
124
132
|
)
|
125
133
|
return HResponse(code=r.status_code, body=r.text, headers=dict(r.headers))
|
134
|
+
except requests.exceptions.Timeout:
|
135
|
+
return HResponse(error="timeout")
|
136
|
+
except requests.exceptions.ProxyError:
|
137
|
+
return HResponse(error="proxy_error")
|
138
|
+
except requests.exceptions.RequestException as err:
|
139
|
+
return HResponse(error=f"connection_error: {err}")
|
140
|
+
except Exception as err:
|
141
|
+
return HResponse(error=f"exception: {err}")
|
142
|
+
|
143
|
+
|
144
|
+
async def async_hrequest(
    url: str,
    *,
    method: str = "GET",
    proxy: str | None = None,
    params: dict[str, Any] | None = None,
    headers: dict[str, Any] | None = None,
    cookies: dict[str, Any] | None = None,
    timeout: float = 10,  # noqa: ASYNC109
    user_agent: str | None = None,
    json_params: bool = True,
    auth: httpx.Auth | tuple[str, str] | None = None,
    verify: bool = True,
) -> HResponse:
    """Perform an async HTTP request via httpx.

    Network and protocol failures are never raised: they are folded into the
    returned HResponse's ``error`` field instead.
    """
    method = method.upper()
    if not headers:
        headers = {}
    if user_agent:
        headers["user-agent"] = user_agent

    # Route `params` to exactly one channel: query string for GET,
    # otherwise a JSON body or a form-encoded body depending on json_params.
    query_params: dict[str, Any] | None = None
    json_: dict[str, Any] | None = None
    data: dict[str, Any] | None = None
    if method == "GET":
        query_params = params
    elif json_params:
        json_ = params
    else:
        data = params

    try:
        client_kwargs = {
            "proxy": proxy,
            "timeout": timeout,
            "cookies": cookies,
            "auth": auth,
            "verify": verify,
        }
        async with httpx.AsyncClient(**client_kwargs) as client:
            response = await client.request(
                method,
                url,
                headers=headers,
                params=query_params,
                json=json_,
                data=data,
            )
            return HResponse(code=response.status_code, body=response.text, headers=dict(response.headers))
    except httpx.TimeoutException:
        return HResponse(error="timeout")
    except httpx.ProxyError:
        return HResponse(error="proxy_error")
    except httpx.RequestError as err:
        return HResponse(error=f"connection_error: {err}")
    except Exception as err:
        return HResponse(error=f"exception: {err}")
|
@@ -141,3 +205,4 @@ def add_query_params_to_url(url: str, params: dict[str, object]) -> str:
|
|
141
205
|
|
142
206
|
|
143
207
|
hr = hrequest
|
208
|
+
ahr = async_hrequest
|
@@ -1,11 +1,14 @@
|
|
1
1
|
Metadata-Version: 2.4
|
2
2
|
Name: mm-std
|
3
|
-
Version: 0.3.
|
3
|
+
Version: 0.3.10
|
4
4
|
Requires-Python: >=3.12
|
5
|
+
Requires-Dist: anyio>=4.9.0
|
5
6
|
Requires-Dist: cryptography~=44.0.2
|
6
|
-
Requires-Dist: httpx[
|
7
|
+
Requires-Dist: httpx[socks]>=0.28.1
|
8
|
+
Requires-Dist: pydantic-settings>=2.8.1
|
7
9
|
Requires-Dist: pydantic~=2.10.6
|
8
10
|
Requires-Dist: pydash~=8.0.5
|
9
11
|
Requires-Dist: python-dotenv~=1.0.1
|
12
|
+
Requires-Dist: requests[socks]~=2.32.3
|
10
13
|
Requires-Dist: rich~=13.9.4
|
11
14
|
Requires-Dist: tomlkit~=0.13.2
|
@@ -1,13 +1,14 @@
|
|
1
|
-
mm_std/__init__.py,sha256=
|
1
|
+
mm_std/__init__.py,sha256=HBJRK8f5Qz3v3XtwT7r5lvaANNknAuoV1NsPuO9yQDc,2646
|
2
|
+
mm_std/async_concurrency.py,sha256=Zp0_tcRhoGJPJ1iueXKTlJHv0IZDDYLgOmpHSvb1kKs,3925
|
2
3
|
mm_std/command.py,sha256=ze286wjUjg0QSTgIu-2WZks53_Vclg69UaYYgPpQvCU,1283
|
3
4
|
mm_std/concurrency.py,sha256=4kKLhde6YQYsjJJjH6K5eMQj6FtegEz55Mo5TmhQMM0,5242
|
4
|
-
mm_std/config.py,sha256=
|
5
|
+
mm_std/config.py,sha256=4ox4D2CgGR76bvZ2n2vGQOYUDagFnlKEDb87to5zpxE,1871
|
5
6
|
mm_std/crypto.py,sha256=jdk0_TCmeU0pPXMyz9xH6kQHSjjZ9GcGClBwQps5vBo,340
|
6
7
|
mm_std/date.py,sha256=976eEkSONuNqHQBgSRu8hrtH23tJqztbmHFHLdbP2TY,1879
|
7
8
|
mm_std/dict.py,sha256=6GkhJPXD0LiJDxPcYe6jPdEDw-MN7P7mKu6U5XxwYDk,675
|
8
9
|
mm_std/env.py,sha256=5zaR9VeIfObN-4yfgxoFeU5IM1GDeZZj9SuYf7t9sOA,125
|
9
10
|
mm_std/fs.py,sha256=RwarNRJq3tIMG6LVX_g03hasfYpjYFh_O27oVDt5IPQ,291
|
10
|
-
mm_std/http_.py,sha256=
|
11
|
+
mm_std/http_.py,sha256=JkyHZ29EOC6Ulqw_5eBNLmeMrca6NDGueTSwpj96aKM,6003
|
11
12
|
mm_std/json_.py,sha256=Naa6mBE4D0yiQGkPNRrFvndnUH3R7ovw3FeaejWV60o,1196
|
12
13
|
mm_std/log.py,sha256=6ux6njNKc_ZCQlvWn1FZR6vcSY2Cem-mQzmNXvsg5IE,913
|
13
14
|
mm_std/net.py,sha256=qdRCBIDneip6FaPNe5mx31UtYVmzqam_AoUF7ydEyjA,590
|
@@ -19,6 +20,6 @@ mm_std/str.py,sha256=BEjJ1p5O4-uSYK0h-enasSSDdwzkBbiwdQ4_dsrlEE8,3257
|
|
19
20
|
mm_std/toml.py,sha256=CNznWKR0bpOxS6e3VB5LGS-Oa9lW-wterkcPUFtPcls,610
|
20
21
|
mm_std/types_.py,sha256=hvZlnvBWyB8CL_MeEWWD0Y0nN677plibYn3yD-5g7xs,99
|
21
22
|
mm_std/zip.py,sha256=axzF1BwcIygtfNNTefZH7hXKaQqwe-ZH3ChuRWr9dnk,396
|
22
|
-
mm_std-0.3.
|
23
|
-
mm_std-0.3.
|
24
|
-
mm_std-0.3.
|
23
|
+
mm_std-0.3.10.dist-info/METADATA,sha256=1i_m2ldTzID9GccEEPB8ZDqs1DOlGsSdVgkoaHhNdZQ,410
|
24
|
+
mm_std-0.3.10.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
25
|
+
mm_std-0.3.10.dist-info/RECORD,,
|
File without changes
|