mm-std 0.3.16__py3-none-any.whl → 0.3.17__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
mm_std/__init__.py CHANGED
@@ -22,10 +22,10 @@ from .http_ import CHROME_USER_AGENT as CHROME_USER_AGENT
22
22
  from .http_ import FIREFOX_USER_AGENT as FIREFOX_USER_AGENT
23
23
  from .http_ import HResponse as HResponse
24
24
  from .http_ import add_query_params_to_url as add_query_params_to_url
25
- from .http_ import ahr as ahr
26
- from .http_ import async_hrequest as async_hrequest
27
25
  from .http_ import hr as hr
26
+ from .http_ import hra as hra
28
27
  from .http_ import hrequest as hrequest
28
+ from .http_ import hrequest_async as hrequest_async
29
29
  from .json_ import CustomJSONEncoder as CustomJSONEncoder
30
30
  from .json_ import json_dumps as json_dumps
31
31
  from .log import init_logger as init_logger
@@ -1,15 +1,14 @@
1
+ import asyncio
1
2
  import functools
2
3
  from collections.abc import Awaitable, Callable
3
4
  from typing import ParamSpec, TypeVar
4
5
 
5
- import anyio
6
-
7
6
  P = ParamSpec("P")
8
7
  R = TypeVar("R")
9
8
 
10
9
 
11
10
  def async_synchronized(func: Callable[P, Awaitable[R]]) -> Callable[P, Awaitable[R]]:
12
- lock = anyio.Lock()
11
+ lock = asyncio.Lock()
13
12
 
14
13
  @functools.wraps(func)
15
14
  async def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
@@ -1,19 +1,25 @@
1
1
  import asyncio
2
- import threading
3
2
  from dataclasses import dataclass, field
4
3
  from datetime import datetime
5
4
  from logging import Logger
6
5
  from typing import Any
7
6
 
8
- import anyio
9
-
10
7
  from mm_std.date import utc_now
11
8
  from mm_std.types_ import Args, AsyncFunc, Kwargs
12
9
 
13
10
 
14
11
  class AsyncScheduler:
12
+ """
13
+ A scheduler for running async tasks at fixed intervals.
14
+
15
+ Each task runs on its own schedule and waits for the specified interval
16
+ between executions.
17
+ """
18
+
15
19
  @dataclass
16
20
  class TaskInfo:
21
+ """Information about a scheduled task."""
22
+
17
23
  task_id: str
18
24
  interval: float
19
25
  func: AsyncFunc
@@ -25,15 +31,32 @@ class AsyncScheduler:
25
31
  running: bool = False
26
32
 
27
33
  def __init__(self, logger: Logger) -> None:
34
+ """
35
+ Initialize the async scheduler.
36
+
37
+ Args:
38
+ logger: Logger instance for recording scheduler events
39
+ """
28
40
  self.tasks: dict[str, AsyncScheduler.TaskInfo] = {}
29
41
  self._running: bool = False
30
- self._cancel_scope: anyio.CancelScope | None = None
42
+ self._tasks: list[asyncio.Task[Any]] = []
31
43
  self._main_task: asyncio.Task[Any] | None = None
32
- self._thread: threading.Thread | None = None
33
44
  self._logger = logger
34
45
 
35
46
  def add_task(self, task_id: str, interval: float, func: AsyncFunc, args: Args = (), kwargs: Kwargs | None = None) -> None:
36
- """Register a new task with the scheduler."""
47
+ """
48
+ Register a new task with the scheduler.
49
+
50
+ Args:
51
+ task_id: Unique identifier for the task
52
+ interval: Time in seconds between task executions
53
+ func: Async function to execute
54
+ args: Positional arguments to pass to the function
55
+ kwargs: Keyword arguments to pass to the function
56
+
57
+ Raises:
58
+ ValueError: If a task with the same ID already exists
59
+ """
37
60
  if kwargs is None:
38
61
  kwargs = {}
39
62
  if task_id in self.tasks:
@@ -41,70 +64,90 @@ class AsyncScheduler:
41
64
  self.tasks[task_id] = AsyncScheduler.TaskInfo(task_id=task_id, interval=interval, func=func, args=args, kwargs=kwargs)
42
65
 
43
66
  async def _run_task(self, task_id: str) -> None:
44
- """Internal loop for running a single task repeatedly."""
67
+ """
68
+ Internal loop for running a single task repeatedly.
69
+
70
+ Args:
71
+ task_id: ID of the task to run
72
+ """
45
73
  task = self.tasks[task_id]
46
- while self._running:
47
- task.last_run = utc_now()
48
- task.run_count += 1
49
- try:
50
- await task.func(*task.args, **task.kwargs)
51
- except Exception:
52
- task.error_count += 1
53
- self._logger.exception("AsyncScheduler exception")
54
-
55
- # Calculate elapsed time and sleep if needed so that tasks never overlap.
56
- elapsed = (utc_now() - task.last_run).total_seconds()
57
- sleep_time = task.interval - elapsed
58
- if sleep_time > 0:
74
+ task.running = True
75
+
76
+ try:
77
+ while self._running:
78
+ task.last_run = utc_now()
79
+ task.run_count += 1
59
80
  try:
60
- await anyio.sleep(sleep_time)
81
+ await task.func(*task.args, **task.kwargs)
61
82
  except Exception:
62
- self._logger.exception("AsyncScheduler exception")
83
+ task.error_count += 1
84
+ self._logger.exception(f"Exception in task {task_id}")
85
+
86
+ # Calculate elapsed time and sleep if needed
87
+ elapsed = (utc_now() - task.last_run).total_seconds()
88
+ sleep_time = max(0, task.interval - elapsed)
89
+ if sleep_time > 0:
90
+ try:
91
+ await asyncio.sleep(sleep_time)
92
+ except asyncio.CancelledError:
93
+ break
94
+ finally:
95
+ task.running = False
96
+ self._logger.debug(f"Task {task_id} stopped")
63
97
 
64
98
  async def _start_all_tasks(self) -> None:
65
- """Starts all tasks concurrently in an AnyIO task group."""
66
- async with anyio.create_task_group() as tg:
67
- self._cancel_scope = tg.cancel_scope
68
- for task_id in self.tasks:
69
- tg.start_soon(self._run_task, task_id)
70
- try:
71
- while self._running: # noqa: ASYNC110
72
- await anyio.sleep(0.1)
73
- except anyio.get_cancelled_exc_class():
74
- self._logger.debug("Task group cancelled. Exiting _start_all_tasks.")
99
+ """Starts all tasks concurrently using asyncio tasks."""
100
+ self._tasks = []
101
+
102
+ for task_id in self.tasks:
103
+ task = asyncio.create_task(self._run_task(task_id))
104
+ self._tasks.append(task)
105
+
106
+ try:
107
+ # Keep the main task alive while the scheduler is running
108
+ while self._running: # noqa: ASYNC110
109
+ await asyncio.sleep(0.1)
110
+ except asyncio.CancelledError:
111
+ self._logger.debug("Main scheduler task cancelled")
112
+ finally:
113
+ # Cancel all running tasks when we exit
114
+ for task in self._tasks:
115
+ if not task.done():
116
+ task.cancel()
117
+
118
+ # Wait for all tasks to finish
119
+ if self._tasks:
120
+ await asyncio.gather(*self._tasks, return_exceptions=True)
121
+ self._tasks = []
75
122
 
76
123
  def start(self) -> None:
77
124
  """
78
125
  Start the scheduler.
79
126
 
80
- This method launches the scheduler in a background thread,
81
- which runs an AnyIO event loop.
127
+ Creates tasks in the current event loop for each registered task.
82
128
  """
83
129
  if self._running:
84
130
  self._logger.warning("AsyncScheduler already running")
85
131
  return
132
+
86
133
  self._running = True
87
134
  self._logger.debug("Starting AsyncScheduler")
88
-
89
- # Create a task in the current event loop
90
135
  self._main_task = asyncio.create_task(self._start_all_tasks())
91
136
 
92
137
  def stop(self) -> None:
93
138
  """
94
139
  Stop the scheduler.
95
140
 
96
- The running flag is set to False so that each task's loop will exit.
97
- This method then waits for the background thread to finish.
141
+ Cancels all running tasks and waits for them to complete.
98
142
  """
99
143
  if not self._running:
100
144
  self._logger.warning("AsyncScheduler not running")
101
145
  return
146
+
102
147
  self._logger.debug("Stopping AsyncScheduler")
103
148
  self._running = False
104
- if self._cancel_scope is not None:
105
- self._cancel_scope.cancel()
106
149
 
107
- if self._thread:
108
- self._thread.join(timeout=5)
109
- self._thread = None
150
+ if self._main_task and not self._main_task.done():
151
+ self._main_task.cancel()
152
+
110
153
  self._logger.debug("AsyncScheduler stopped")
@@ -1,11 +1,10 @@
1
1
  from __future__ import annotations
2
2
 
3
- from collections.abc import Awaitable, Callable
3
+ import asyncio
4
+ from collections.abc import Awaitable
4
5
  from dataclasses import dataclass
5
6
  from typing import Any
6
7
 
7
- import anyio
8
-
9
8
 
10
9
  class AsyncTaskRunner:
11
10
  """
@@ -25,20 +24,20 @@ class AsyncTaskRunner:
25
24
  """Individual task representation"""
26
25
 
27
26
  task_id: str
28
- async_func: Callable[..., Awaitable[Any]]
29
- args: tuple[Any, ...]
30
- kwargs: dict[str, Any]
27
+ awaitable: Awaitable[Any]
31
28
 
32
- def __init__(self, max_concurrent_tasks: int, timeout: float | None = None) -> None:
29
+ def __init__(self, max_concurrent_tasks: int, timeout: float | None = None, name: str | None = None) -> None:
33
30
  """
34
31
  :param max_concurrent_tasks: Maximum number of tasks that can run concurrently.
35
32
  :param timeout: Optional overall timeout in seconds for running all tasks.
33
+ :param name: Optional name for the runner.
36
34
  """
37
35
  if timeout is not None and timeout <= 0:
38
36
  raise ValueError("Timeout must be positive if specified.")
39
37
  self.max_concurrent_tasks: int = max_concurrent_tasks
40
38
  self.timeout: float | None = timeout
41
- self.limiter: anyio.CapacityLimiter = anyio.CapacityLimiter(max_concurrent_tasks)
39
+ self.name = name
40
+ self.semaphore: asyncio.Semaphore = asyncio.Semaphore(max_concurrent_tasks)
42
41
  self._tasks: list[AsyncTaskRunner.Task] = []
43
42
  self._was_run: bool = False
44
43
  self._task_ids: set[str] = set()
@@ -46,17 +45,13 @@ class AsyncTaskRunner:
46
45
  def add_task(
47
46
  self,
48
47
  task_id: str,
49
- async_func: Callable[..., Awaitable[Any]],
50
- *args: object,
51
- **kwargs: object,
48
+ awaitable: Awaitable[Any],
52
49
  ) -> None:
53
50
  """
54
51
  Adds a task to the runner that will be executed when run() is called.
55
52
 
56
53
  :param task_id: Unique identifier for the task.
57
- :param async_func: The asynchronous function to execute.
58
- :param args: Positional arguments for async_func.
59
- :param kwargs: Keyword arguments for async_func.
54
+ :param awaitable: The awaitable (coroutine) to execute.
60
55
  :raises RuntimeError: If the runner has already been used.
61
56
  :raises ValueError: If task_id is empty or already exists.
62
57
  """
@@ -70,11 +65,11 @@ class AsyncTaskRunner:
70
65
  raise ValueError(f"Task ID '{task_id}' already exists. All task IDs must be unique.")
71
66
 
72
67
  self._task_ids.add(task_id)
73
- self._tasks.append(AsyncTaskRunner.Task(task_id, async_func, args, kwargs))
68
+ self._tasks.append(AsyncTaskRunner.Task(task_id, awaitable))
74
69
 
75
70
  async def run(self) -> AsyncTaskRunner.Result:
76
71
  """
77
- Executes all added tasks with concurrency limited by the capacity limiter.
72
+ Executes all added tasks with concurrency limited by the semaphore.
78
73
  If a timeout is specified, non-finished tasks are cancelled.
79
74
 
80
75
  :return: AsyncTaskRunner.Result containing task results, exceptions, and flags indicating overall status.
@@ -89,24 +84,30 @@ class AsyncTaskRunner:
89
84
  is_timeout: bool = False
90
85
 
91
86
  async def run_task(task: AsyncTaskRunner.Task) -> None:
92
- async with self.limiter:
87
+ async with self.semaphore:
93
88
  try:
94
- res: Any = await task.async_func(*task.args, **task.kwargs)
89
+ res: Any = await task.awaitable
95
90
  results[task.task_id] = res
96
91
  except Exception as e:
97
92
  exceptions[task.task_id] = e
98
93
 
94
+ # Create asyncio tasks for all runner tasks
95
+ tasks = [asyncio.create_task(run_task(task)) for task in self._tasks]
96
+
99
97
  try:
100
98
  if self.timeout is not None:
101
- with anyio.fail_after(self.timeout):
102
- async with anyio.create_task_group() as tg:
103
- for task in self._tasks:
104
- tg.start_soon(run_task, task)
99
+ # Run with timeout
100
+ await asyncio.wait_for(asyncio.gather(*tasks), timeout=self.timeout)
105
101
  else:
106
- async with anyio.create_task_group() as tg:
107
- for task in self._tasks:
108
- tg.start_soon(run_task, task)
102
+ # Run without timeout
103
+ await asyncio.gather(*tasks)
109
104
  except TimeoutError:
105
+ # Cancel all running tasks on timeout
106
+ for task in tasks:
107
+ if not task.done():
108
+ task.cancel()
109
+ # Wait for tasks to complete cancellation
110
+ await asyncio.gather(*tasks, return_exceptions=True)
110
111
  is_timeout = True
111
112
 
112
113
  is_ok: bool = (not exceptions) and (not is_timeout)
mm_std/http_.py CHANGED
@@ -1,12 +1,14 @@
1
+ import asyncio
1
2
  import json
2
3
  from dataclasses import asdict, dataclass, field
3
4
  from typing import Any
4
5
  from urllib.parse import urlencode
5
6
 
6
- import anyio
7
- import httpx
7
+ import aiohttp
8
8
  import pydash
9
9
  import requests
10
+ import rich
11
+ from aiohttp_socks import ProxyConnector
10
12
  from requests.auth import AuthBase
11
13
 
12
14
  from mm_std.result import Err, Ok, Result
@@ -142,7 +144,7 @@ def hrequest(
142
144
  return HResponse(error=f"exception: {err}")
143
145
 
144
146
 
145
- async def async_hrequest(
147
+ async def hrequest_async(
146
148
  url: str,
147
149
  *,
148
150
  method: str = "GET",
@@ -150,16 +152,17 @@ async def async_hrequest(
150
152
  params: dict[str, Any] | None = None,
151
153
  headers: dict[str, Any] | None = None,
152
154
  cookies: dict[str, Any] | None = None,
153
- timeout: float = 10, # noqa: ASYNC109
155
+ timeout: float = 10,
154
156
  user_agent: str | None = None,
155
157
  json_params: bool = True,
156
- auth: httpx.Auth | tuple[str, str] | None = None,
158
+ auth: tuple[str, str] | None = None,
157
159
  verify: bool = True,
158
160
  ) -> HResponse:
159
161
  query_params: dict[str, Any] | None = None
160
162
  data: dict[str, Any] | None = None
161
163
  json_: dict[str, Any] | None = None
162
164
  method = method.upper()
165
+
163
166
  if not headers:
164
167
  headers = {}
165
168
  if user_agent:
@@ -171,33 +174,54 @@ async def async_hrequest(
171
174
  else:
172
175
  data = params
173
176
 
174
- with anyio.move_on_after(timeout):
175
- try:
176
- async with httpx.AsyncClient(
177
- proxy=proxy,
178
- timeout=timeout,
179
- cookies=cookies,
180
- auth=auth,
181
- verify=verify,
182
- ) as client:
183
- r = await client.request(
184
- method,
185
- url,
186
- headers=headers,
187
- params=query_params,
188
- json=json_,
189
- data=data,
190
- )
191
- return HResponse(code=r.status_code, body=r.text, headers=dict(r.headers))
192
- except httpx.TimeoutException:
193
- return HResponse(error="timeout")
194
- except httpx.ProxyError:
195
- return HResponse(error="proxy_error")
196
- except httpx.RequestError as err:
197
- return HResponse(error=f"connection_error: {err}")
198
- except Exception as err:
199
- return HResponse(error=f"exception: {err}")
200
- return HResponse(error="timeout")
177
+ try:
178
+ # Configure connector based on proxy type
179
+ if proxy:
180
+ # HTTP proxy will be handled in request kwargs
181
+ connector = ProxyConnector.from_url(proxy) if proxy.startswith("socks5://") else aiohttp.TCPConnector(ssl=verify)
182
+ else:
183
+ connector = aiohttp.TCPConnector(ssl=verify)
184
+
185
+ timeout_obj = aiohttp.ClientTimeout(total=timeout)
186
+
187
+ async with aiohttp.ClientSession(connector=connector, timeout=timeout_obj, cookies=cookies) as session:
188
+ request_kwargs: dict[str, Any] = {"headers": headers}
189
+
190
+ if query_params:
191
+ request_kwargs["params"] = query_params
192
+ if json_:
193
+ request_kwargs["json"] = json_
194
+ if data:
195
+ request_kwargs["data"] = data
196
+
197
+ if auth and isinstance(auth, tuple) and len(auth) == 2:
198
+ request_kwargs["auth"] = aiohttp.BasicAuth(auth[0], auth[1])
199
+
200
+ # Set HTTP proxy (not needed for SOCKS5)
201
+ if proxy and not proxy.startswith("socks5://"):
202
+ request_kwargs["proxy"] = proxy
203
+
204
+ try:
205
+ async with await asyncio.wait_for(session.request(method, url, **request_kwargs), timeout=timeout) as response:
206
+ body = await response.text()
207
+ return HResponse(code=response.status, body=body, headers=dict(response.headers))
208
+ except TimeoutError:
209
+ return HResponse(error="timeout")
210
+ except (aiohttp.ClientProxyConnectionError, aiohttp.ClientHttpProxyError):
211
+ return HResponse(error="proxy_error")
212
+ except aiohttp.ClientConnectorError as err:
213
+ return HResponse(error=f"connection_error: {err}")
214
+ except aiohttp.ClientError as err:
215
+ rich.inspect(err)
216
+ return HResponse(error=f"connection_error: {err}")
217
+ except Exception as err:
218
+ if "couldn't connect to proxy" in str(err).lower():
219
+ return HResponse(error="proxy_error")
220
+ return HResponse(error=f"exception: {err}")
221
+ except TimeoutError:
222
+ return HResponse(error="timeout")
223
+ except Exception as err:
224
+ return HResponse(error=f"exception: {err}")
201
225
 
202
226
 
203
227
  def add_query_params_to_url(url: str, params: dict[str, object]) -> str:
@@ -208,4 +232,4 @@ def add_query_params_to_url(url: str, params: dict[str, object]) -> str:
208
232
 
209
233
 
210
234
  hr = hrequest
211
- ahr = async_hrequest
235
+ hra = hrequest_async
@@ -1,10 +1,10 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: mm-std
3
- Version: 0.3.16
3
+ Version: 0.3.17
4
4
  Requires-Python: >=3.12
5
- Requires-Dist: anyio>=4.9.0
5
+ Requires-Dist: aiohttp-socks~=0.10.1
6
+ Requires-Dist: aiohttp~=3.11.14
6
7
  Requires-Dist: cryptography~=44.0.2
7
- Requires-Dist: httpx[socks]>=0.28.1
8
8
  Requires-Dist: pydantic-settings>=2.8.1
9
9
  Requires-Dist: pydantic~=2.10.6
10
10
  Requires-Dist: pydash~=8.0.5
@@ -1,4 +1,4 @@
1
- mm_std/__init__.py,sha256=UkRzp_lm7yddXImD8nRlJtBnx9YlopWJKqukcbqjRyo,2881
1
+ mm_std/__init__.py,sha256=wQywfba2Uxe5-IKqz3pvaR-p5miYM9DRQ0Qsdq23GTQ,2881
2
2
  mm_std/command.py,sha256=ze286wjUjg0QSTgIu-2WZks53_Vclg69UaYYgPpQvCU,1283
3
3
  mm_std/config.py,sha256=4ox4D2CgGR76bvZ2n2vGQOYUDagFnlKEDb87to5zpxE,1871
4
4
  mm_std/crypto.py,sha256=jdk0_TCmeU0pPXMyz9xH6kQHSjjZ9GcGClBwQps5vBo,340
@@ -6,7 +6,7 @@ mm_std/date.py,sha256=976eEkSONuNqHQBgSRu8hrtH23tJqztbmHFHLdbP2TY,1879
6
6
  mm_std/dict.py,sha256=6GkhJPXD0LiJDxPcYe6jPdEDw-MN7P7mKu6U5XxwYDk,675
7
7
  mm_std/env.py,sha256=5zaR9VeIfObN-4yfgxoFeU5IM1GDeZZj9SuYf7t9sOA,125
8
8
  mm_std/fs.py,sha256=RwarNRJq3tIMG6LVX_g03hasfYpjYFh_O27oVDt5IPQ,291
9
- mm_std/http_.py,sha256=x5d4wnQcPlB_IYaYNCQGz7iOTEft8cQmMggHt30o7xI,6193
9
+ mm_std/http_.py,sha256=cozBUGZcbKp9sZuEnu7bklwa6lTE0RxEUVo_aNt1_kE,7468
10
10
  mm_std/json_.py,sha256=Naa6mBE4D0yiQGkPNRrFvndnUH3R7ovw3FeaejWV60o,1196
11
11
  mm_std/log.py,sha256=6ux6njNKc_ZCQlvWn1FZR6vcSY2Cem-mQzmNXvsg5IE,913
12
12
  mm_std/net.py,sha256=qdRCBIDneip6FaPNe5mx31UtYVmzqam_AoUF7ydEyjA,590
@@ -19,12 +19,12 @@ mm_std/toml.py,sha256=CNznWKR0bpOxS6e3VB5LGS-Oa9lW-wterkcPUFtPcls,610
19
19
  mm_std/types_.py,sha256=9FGd2q47a8M9QQgsWJR1Kq34jLxBAkYSoJuwih4PPqg,257
20
20
  mm_std/zip.py,sha256=axzF1BwcIygtfNNTefZH7hXKaQqwe-ZH3ChuRWr9dnk,396
21
21
  mm_std/concurrency/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
22
- mm_std/concurrency/async_decorators.py,sha256=tJ5u-_G9mAF3zrFr1aBUhYh03rB-K_6oiDKGdmGdlGk,449
23
- mm_std/concurrency/async_scheduler.py,sha256=-crLjrFwRJSNGdyhU3brCvqP7EPPLwEVfZFbDCBe9Qo,3995
24
- mm_std/concurrency/async_task_runner.py,sha256=D4fD_hwSVy55sM_-U1_sdVYlkahnQeigXdMfbIi8Rpc,4492
22
+ mm_std/concurrency/async_decorators.py,sha256=srJXjngf7InLSyD4sazDPNNtZBx9JddxDmH9WZi7s7g,452
23
+ mm_std/concurrency/async_scheduler.py,sha256=bqC43svC_vIPV3U6iUxUzB6tdmEC2OZ2kYKld5sf9QE,5015
24
+ mm_std/concurrency/async_task_runner.py,sha256=zYC2Jv5taUh8dnyDfWwh394SkzTXtdE9hOhvjV2FWKc,4493
25
25
  mm_std/concurrency/sync_decorators.py,sha256=syCQBOmN7qPO55yzgJB2rbkh10CVww376hmyvs6e5tA,1080
26
26
  mm_std/concurrency/sync_scheduler.py,sha256=j4tBL_cBI1spr0cZplTA7N2CoYsznuORMeRN8rpR6gY,2407
27
27
  mm_std/concurrency/sync_task_runner.py,sha256=s5JPlLYLGQGHIxy4oDS-PN7O9gcy-yPZFoNm8RQwzcw,1780
28
- mm_std-0.3.16.dist-info/METADATA,sha256=tXTemaQ7P5DOoxtamgJjtXkXVrhzvHOaaNyH4PavjYg,410
29
- mm_std-0.3.16.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
30
- mm_std-0.3.16.dist-info/RECORD,,
28
+ mm_std-0.3.17.dist-info/METADATA,sha256=R2m2TLzehKG-9kPy56CyRhOL_ndpu_aUMjx6eSsH3PY,415
29
+ mm_std-0.3.17.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
30
+ mm_std-0.3.17.dist-info/RECORD,,