glow 0.15.5.tar.gz → 0.15.6.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {glow-0.15.5 → glow-0.15.6}/PKG-INFO +1 -1
- {glow-0.15.5 → glow-0.15.6}/pyproject.toml +1 -1
- {glow-0.15.5 → glow-0.15.6}/src/glow/_async.py +3 -12
- {glow-0.15.5 → glow-0.15.6}/src/glow/_async.pyi +2 -1
- {glow-0.15.5 → glow-0.15.6}/src/glow/_cache.py +8 -9
- {glow-0.15.5 → glow-0.15.6}/src/glow/_cache.pyi +2 -1
- {glow-0.15.5 → glow-0.15.6}/src/glow/_concurrency.py +7 -9
- {glow-0.15.5 → glow-0.15.6}/src/glow/_concurrency.pyi +5 -4
- {glow-0.15.5 → glow-0.15.6}/src/glow/_coro.py +1 -1
- {glow-0.15.5 → glow-0.15.6}/src/glow/_debug.py +13 -6
- {glow-0.15.5 → glow-0.15.6}/src/glow/_futures.py +22 -2
- {glow-0.15.5 → glow-0.15.6}/src/glow/_import_hook.py +4 -4
- {glow-0.15.5 → glow-0.15.6}/src/glow/_parallel.py +10 -6
- {glow-0.15.5 → glow-0.15.6}/src/glow/_profile.py +8 -8
- {glow-0.15.5 → glow-0.15.6}/src/glow/_profile.pyi +7 -7
- {glow-0.15.5 → glow-0.15.6}/src/glow/_reduction.py +2 -1
- {glow-0.15.5 → glow-0.15.6}/src/glow/_streams.py +4 -3
- glow-0.15.6/src/glow/_types.py +31 -0
- {glow-0.15.5 → glow-0.15.6}/src/glow/_uuid.py +7 -1
- {glow-0.15.5 → glow-0.15.6}/src/glow/_wrap.py +3 -1
- {glow-0.15.5 → glow-0.15.6}/src/glow/cli.py +10 -11
- {glow-0.15.5 → glow-0.15.6}/src/glow/cli.pyi +3 -1
- {glow-0.15.5 → glow-0.15.6}/src/glow/io/_sound.py +20 -8
- {glow-0.15.5 → glow-0.15.6}/test/test_cli.py +7 -0
- glow-0.15.5/src/glow/_types.py +0 -53
- {glow-0.15.5 → glow-0.15.6}/.gitignore +0 -0
- {glow-0.15.5 → glow-0.15.6}/LICENSE +0 -0
- {glow-0.15.5 → glow-0.15.6}/README.md +0 -0
- {glow-0.15.5 → glow-0.15.6}/src/glow/__init__.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/src/glow/_array.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/src/glow/_dev.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/src/glow/_ic.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/src/glow/_imutil.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/src/glow/_keys.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/src/glow/_logging.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/src/glow/_more.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/src/glow/_parallel.pyi +0 -0
- {glow-0.15.5 → glow-0.15.6}/src/glow/_patch_len.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/src/glow/_patch_print.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/src/glow/_patch_scipy.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/src/glow/_repr.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/src/glow/_reusable.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/src/glow/_sizeof.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/src/glow/_thread_quota.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/src/glow/api/__init__.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/src/glow/api/config.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/src/glow/api/exporting.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/src/glow/io/__init__.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/src/glow/io/_svg.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/src/glow/py.typed +0 -0
- {glow-0.15.5 → glow-0.15.6}/test/__init__.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/test/test_api.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/test/test_batch.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/test/test_buffered.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/test/test_iter.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/test/test_shm.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/test/test_thread_pool.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/test/test_timed.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/test/test_timer.py +0 -0
- {glow-0.15.5 → glow-0.15.6}/test/test_uuid.py +0 -0
src/glow/_async.py

@@ -17,17 +17,8 @@ from functools import partial
 from typing import TypeGuard, cast, overload
 
 from ._dev import hide_frame
-from ._futures import adispatch
-from ._types import (
-    ABatchDecorator,
-    ABatchFn,
-    AnyFuture,
-    AnyIterable,
-    AnyIterator,
-    Coro,
-)
-
-type _Job[T, R] = tuple[T, AnyFuture[R]]
+from ._futures import ABatchDecorator, ABatchFn, Job, adispatch
+from ._types import AnyIterable, AnyIterator, Coro
 
 
 async def amap_dict[K, T1, T2](

@@ -263,7 +254,7 @@ def astreaming[T, R](
     assert batch_size is None or batch_size >= 1
     assert timeout > 0
 
-    buf: list[
+    buf: list[Job[T, R]] = []
     deadline = float('-inf')
     not_last = asyncio.Event()
     lock = asyncio.Lock()
src/glow/_async.pyi

@@ -1,7 +1,8 @@
 from collections.abc import AsyncIterator, Callable, Mapping
 from typing import Any, Required, TypedDict, Unpack, overload
 
-from .
+from ._futures import ABatchDecorator, ABatchFn
+from ._types import AnyIterable, Coro
 
 class _AmapKwargs(TypedDict, total=False):
     limit: Required[int]
src/glow/_cache.py

@@ -21,20 +21,19 @@ from typing import Any, Final, Protocol, SupportsInt, cast
 from weakref import WeakValueDictionary
 
 from ._dev import clone_exc, hide_frame
-from ._futures import
-from ._keys import make_key
-from ._repr import si_bin
-from ._sizeof import sizeof
-from ._types import (
+from ._futures import (
     ABatchFn,
     AnyFuture,
     BatchFn,
-    CachePolicy,
-    Decorator,
     Job,
-
-
+    adispatch,
+    dispatch,
+    gather_fs,
 )
+from ._keys import make_key
+from ._repr import si_bin
+from ._sizeof import sizeof
+from ._types import CachePolicy, Decorator, KeyFn, Some
 
 
 class _Empty(enum.Enum):
src/glow/_concurrency.py

@@ -19,15 +19,15 @@ from warnings import warn
 
 from ._cache import memoize
 from ._dev import hide_frame
-from ._futures import dispatch, gather_fs
-from ._types import
+from ._futures import BatchDecorator, BatchFn, Job, dispatch, gather_fs
+from ._types import Get
 
 _PATIENCE = 0.01
 
 
-def threadlocal[T](
-    fn: Callable[
-) ->
+def threadlocal[**P, T](
+    fn: Callable[P, T], /, *args: P.args, **kwargs: P.kwargs
+) -> Get[T]:
     """Create thread-local singleton factory function (functools.partial)."""
     local_ = threading.local()
 

@@ -41,7 +41,7 @@ def threadlocal[T](
     return update_wrapper(wrapper, fn)
 
 
-def call_once[T](fn:
+def call_once[T](fn: Get[T], /) -> Get[T]:
     """Make callable a singleton.
 
     Supports async-def functions (but not async-gen functions).

@@ -81,8 +81,6 @@ def weak_memoize[**P, R](fn: Callable[P, R], /) -> Callable[P, R]:
 
 # ----------------------------- batch collation ------------------------------
 
-type _Job[T, R] = tuple[T, Future[R]]
-
 
 def _fetch_batch[T](
     q: SimpleQueue[T], batch_size: int | None, timeout: float

@@ -120,7 +118,7 @@ def _start_fetch_compute[T, R](
     workers: int,
     batch_size: int | None,
     timeout: float,
-) -> SimpleQueue[
+) -> SimpleQueue[Job[T, R]]:
     q = SimpleQueue()  # type: ignore[var-annotated]
     lock = Lock()
 
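`threadlocal` now carries a `ParamSpec`, so the arguments passed to the factory are type-checked against `fn` and the result is a `Get[T]`. A minimal usage sketch; it assumes `threadlocal` is re-exported from the top-level `glow` package (only its definition in `glow/_concurrency.py` is confirmed by this diff):

```python
# Hedged sketch: per-thread lazy singletons via threadlocal.
# Assumption: `from glow import threadlocal` works; the diff only confirms
# the definition in glow/_concurrency.py.
import threading

from glow import threadlocal

# Each thread builds and caches its own list(range(3)).
get_buf = threadlocal(list, range(3))


def worker() -> None:
    a, b = get_buf(), get_buf()
    assert a is b  # same object within a single thread
    print(threading.current_thread().name, a)


threading.Thread(target=worker).start()
threading.Thread(target=worker).start()
```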
src/glow/_concurrency.pyi

@@ -2,13 +2,14 @@ from collections.abc import Callable
 from contextlib import AbstractContextManager
 from typing import overload
 
-from .
+from ._futures import BatchDecorator, BatchFn
+from ._types import Get
 
 def threadlocal[T, **P](
-    fn: Callable[P, T], *args: P.args, **kwargs: P.kwargs
-) ->
+    fn: Callable[P, T], /, *args: P.args, **kwargs: P.kwargs
+) -> Get[T]: ...
 def interpreter_lock(timeout: float = ...) -> AbstractContextManager[None]: ...
-def call_once[T](fn:
+def call_once[T](fn: Get[T], /) -> Get[T]: ...
 def shared_call[**P, R](fn: Callable[P, R], /) -> Callable[P, R]: ...
 def weak_memoize[**P, R](fn: Callable[P, R], /) -> Callable[P, R]: ...
 @overload
src/glow/_coro.py

@@ -29,7 +29,7 @@ class _Sync[Y, S, R](wrapt.ObjectProxy): # type: ignore[misc]
         self._self_lock = Lock()
 
     def _call[**P, T](
-        self, op: Callable[P, T], *args: P.args, **kwargs: P.kwargs
+        self, op: Callable[P, T], /, *args: P.args, **kwargs: P.kwargs
    ) -> T:
         with self._self_lock:
             return op(*args, **kwargs)
src/glow/_debug.py

@@ -3,12 +3,11 @@ __all__ = ['lock_seed', 'trace', 'trace_module', 'whereami']
 import gc
 import os
 import random
-import
-from collections.abc import Iterator
+from collections.abc import Callable, Iterator
 from contextlib import suppress
 from inspect import currentframe, getmodule, isfunction
 from itertools import islice
-from types import FrameType
+from types import FrameType, ModuleType
 
 import numpy as np
 import wrapt

@@ -74,12 +73,19 @@ def trace(fn, _, args, kwargs):
     return fn(*args, **kwargs)
 
 
-def _set_trace(
+def _set_trace(
+    obj: ModuleType | Callable,
+    *,
+    seen: set[str] | None = None,
+    prefix: str | None = None,
+    module: ModuleType | None = None,
+) -> None:
     # TODO: rewrite using unittest.mock
-    if isinstance(obj,
+    if isinstance(obj, ModuleType):
         if seen is None:
             seen = set()
             prefix = obj.__name__
+        assert isinstance(prefix, str)
         if not obj.__name__.startswith(prefix) or obj.__name__ in seen:
             return
         seen.add(obj.__name__)

@@ -91,6 +97,7 @@ def _set_trace(obj, seen=None, prefix=None, module=None):
     if not callable(obj):
         return
 
+    assert isinstance(module, ModuleType)
     if not hasattr(obj, '__dict__'):
         setattr(module, obj.__qualname__, trace(obj))
         print(f'wraps "{module.__name__}:{obj.__qualname__}"')

@@ -113,7 +120,7 @@ def _set_trace(obj, seen=None, prefix=None, module=None):
            print(f'wraps "{module.__name__}:{obj.__qualname__}.{name}"')
 
 
-def trace_module(name):
+def trace_module(name: str) -> None:
     """Enable call logging for each callable inside module name."""
     register_post_import_hook(_set_trace, name)
 
src/glow/_futures.py

@@ -1,9 +1,29 @@
 import asyncio
 import concurrent.futures as cf
-from collections.abc import Hashable, Iterable, Sequence
+from collections.abc import Callable, Hashable, Iterable, Sequence
+from typing import Protocol, overload
 
 from ._dev import hide_frame
-from ._types import
+from ._types import Coro, Some
+
+type AnyFuture[R] = cf.Future[R] | asyncio.Future[R]
+type Job[T, R] = tuple[T, AnyFuture[R]]
+
+type BatchFn[T, R] = Callable[[Sequence[T]], Sequence[R]]
+type ABatchFn[T, R] = Callable[[Sequence[T]], Coro[Sequence[R]]]
+
+
+class BatchDecorator(Protocol):
+    def __call__[T, R](self, fn: BatchFn[T, R], /) -> BatchFn[T, R]: ...
+class ABatchDecorator(Protocol):
+    def __call__[T, R](self, fn: ABatchFn[T, R], /) -> ABatchFn[T, R]: ...
+
+
+class AnyBatchDecorator(Protocol):
+    @overload
+    def __call__[T, R](self, fn: BatchFn[T, R], /) -> BatchFn[T, R]: ...
+    @overload
+    def __call__[T, R](self, fn: ABatchFn[T, R], /) -> ABatchFn[T, R]: ...
 
 
 def dispatch[T, R](fn: BatchFn[T, R], *xs: Job[T, R]) -> None:
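For orientation, this is how the relocated batch-typing helpers read in practice. The alias is re-declared locally so the sketch stands alone instead of importing glow's private `_futures` module:

```python
# Standalone sketch (Python 3.12+): a plain function satisfying BatchFn[int, int].
from collections.abc import Callable, Sequence

type BatchFn[T, R] = Callable[[Sequence[T]], Sequence[R]]  # mirrors the diff


def double_all(xs: Sequence[int]) -> Sequence[int]:
    return [2 * x for x in xs]


fn: BatchFn[int, int] = double_all  # accepted by a type checker
print(fn([1, 2, 3]))  # [2, 4, 6]
```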
src/glow/_import_hook.py

@@ -7,11 +7,11 @@ from importlib.machinery import ModuleSpec
 from threading import RLock
 from types import ModuleType
 
-
+from ._types import Callback
 
 _INITIALIZED = False
 _LOCK = RLock()
-_HOOKS: dict[str, list[
+_HOOKS: dict[str, list[Callback[ModuleType]]] = {}
 
 
 class _ImportHookChainedLoader(abc.Loader):

@@ -71,7 +71,7 @@ class _ImportHookFinder(abc.MetaPathFinder, set[str]):
         return None
 
 
-def register_post_import_hook(hook:
+def register_post_import_hook(hook: Callback[ModuleType], name: str) -> None:
     """Register a new post import hook for the target module name.
 
     This will result in a proxy callback being registered which will defer

@@ -92,7+92,7 @@ def register_post_import_hook(hook: _Hook, name: str) -> None:
         hook(module)
 
 
-def when_imported[H:
+def when_imported[H: Callback[ModuleType]](name: str) -> Callable[[H], H]:
     """Create decorator making a function a post import hook for a module.
 
     Simplified version of wrapt.when_imported.
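`Callback[ModuleType]` here just means "takes the imported module, returns anything". A hedged sketch of registering a post-import hook with the retyped `when_imported`; the import path is the private module shown in this diff, and whether the hook fires for an already-imported module is not confirmed here:

```python
# Hedged sketch of a post-import hook via glow._import_hook.when_imported.
from types import ModuleType

from glow._import_hook import when_imported


@when_imported('csv')
def _announce(mod: ModuleType) -> None:  # a Callback[ModuleType]
    print(f'post-import hook fired for {mod.__name__}')


import csv  # noqa: E402,F401  # triggers the hook if csv was not loaded yet
```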
src/glow/_parallel.py

@@ -40,7 +40,7 @@ from ._dev import hide_frame
 from ._more import chunked, ilen
 from ._reduction import move_to_shmem, reducers
 from ._thread_quota import ThreadQuota
-from ._types import Some
+from ._types import Get, Some
 
 _TOTAL_CPUS = (
     os.process_cpu_count() if sys.version_info >= (3, 13) else os.cpu_count()

@@ -115,12 +115,16 @@ def max_cpu_count(upper_bound: int = sys.maxsize, *, mp: bool = False) -> int:
 _PATIENCE = 0.01
 
 
-
+class _TimeoutCallable[T](Protocol):
+    def __call__(self, *, timeout: float) -> T: ...
+
+
+def _retry_call[T](fn: _TimeoutCallable[T], *exc: type[BaseException]) -> T:
     # See issues
-    # https://bugs.python.org/issue29971
-    # https://github.com/python/cpython/issues/74157
     # https://github.com/dask/dask/pull/2144#issuecomment-290556996
     # https://github.com/dask/dask/pull/2144/files
+    # https://github.com/python/cpython/issues/74157
+    # FIXED in py3.15+
     while True:
         try:
             return fn(timeout=_PATIENCE)

@@ -146,7 +150,7 @@ def _result[T](f: Future[T], cancel: bool = True) -> Some[T] | BaseException:
     del f
 
 
-def _q_get_fn[T](q: _Queue[T]) ->
+def _q_get_fn[T](q: _Queue[T]) -> Get[T]:
     if sys.platform != 'win32':
         return q.get
     return partial(_retry_call, q.get, Empty)

@@ -377,7 +381,7 @@ def _schedule_auto_v2[F: Future](
 def _get_unwrap_iter[T](
     s: ExitStack,
     qsize: int,
-    get_done_f:
+    get_done_f: Get[Future[T]],
     fs_scheduler: Iterator,
 ) -> Iterator[T]:
     with s:
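`_TimeoutCallable` encodes "callable with a keyword-only `timeout`", which is what `_retry_call` polls. A generic re-creation of that short-timeout retry pattern follows; the `except` clause is inferred from the referenced CPython/dask issues rather than shown in the diff:

```python
# Generic re-creation of the short-timeout retry pattern behind _retry_call.
# Assumption: expected exceptions are swallowed and the wait is retried.
from queue import Empty, SimpleQueue

_PATIENCE = 0.01  # seconds; matches the constant visible in the diff


def retry_call(fn, *exc: type[BaseException]):
    while True:
        try:
            return fn(timeout=_PATIENCE)
        except exc:
            continue  # short waits keep the call interruptible (Ctrl-C)


q: SimpleQueue[int] = SimpleQueue()
q.put(42)
print(retry_call(q.get, Empty))  # 42
```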
src/glow/_profile.py

@@ -13,6 +13,8 @@ from typing import TYPE_CHECKING
 from ._debug import whereami
 from ._repr import si, si_bin
 from ._streams import Stream, cumsum, maximum_cumsum
+from ._types import Callback, Get
+
 from ._wrap import wrap
 
 if TYPE_CHECKING:

@@ -25,7 +27,7 @@ _THIS = None
 
 @contextmanager
 def memprof(
-    name_or_callback: str |
+    name_or_callback: str | Callback[float] | None = None, /
 ) -> Iterator[None]:
     global _THIS  # noqa: PLW0603
     if _THIS is None:

@@ -50,9 +52,7 @@ def memprof(
 
 @contextmanager
 def _timer_callback(
-    callback:
-    time: Callable[[], int] = perf_counter_ns,
-    /,
+    callback: Callback[int], time: Get[int] = perf_counter_ns, /
 ) -> Iterator[None]:
     begin = time()
     try:

@@ -63,7 +63,7 @@ def _timer_callback(
 
 @contextmanager
 def _timer_print(
-    name: str | None = None, time:
+    name: str | None = None, time: Get[int] = perf_counter_ns, /
 ) -> Iterator[None]:
     begin = time()
     try:

@@ -75,8 +75,8 @@ def _timer_print(
 
 
 def timer(
-    name_or_callback: str |
-    time:
+    name_or_callback: str | Callback[int] | None = None,
+    time: Get[int] = perf_counter_ns,
     /,
     *,
     disable: bool = False,

@@ -109,7 +109,7 @@ class _Profiler:
     # Idle time - elapsed I/O time (like time.sleep, lock.acquire, e.t.c.).
     idle_ns: Stream[int, int] = field(default_factory=cumsum)
 
-    def suspend(self) ->
+    def suspend(self) -> Get[None]:
         self.idle_ns.send(-perf_counter_ns())
         return self.resume
 
src/glow/_profile.pyi

@@ -2,21 +2,23 @@ from collections.abc import Callable
 from contextlib import AbstractContextManager
 from typing import overload
 
+from ._types import Callback, Decorator, Get
+
 def memprof(
-    name_or_callback: str |
+    name_or_callback: str | Callback[float] | None = ..., /
 ) -> AbstractContextManager[None]: ...
 @overload
 def timer(
     name: str | None = ...,
-    time:
+    time: Get[int] = ...,
     /,
     *,
     disable: bool = ...,
 ) -> AbstractContextManager[None]: ...
 @overload
 def timer(
-    callback:
-    time:
+    callback: Callback[int] | None,
+    time: Get[int] = ...,
     /,
     *,
     disable: bool = ...,

@@ -26,6 +28,4 @@ def time_this[**P, R](
     fn: Callable[P, R], /, *, name: str | None = ..., disable: bool = ...
 ) -> Callable[P, R]: ...
 @overload
-def time_this
-    *, name: str | None = ..., disable: bool = ...
-) -> Callable[[Callable[P, R]], Callable[P, R]]: ...
+def time_this(*, name: str | None = ..., disable: bool = ...) -> Decorator: ...
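Per the reshaped stubs, `timer` accepts either a display name or a `Callback[int]` and returns a context manager. A usage sketch under the assumption that `timer` is re-exported from the top-level package (only the stub and the `_timer_print`/`_timer_callback` helpers are confirmed by this diff):

```python
# Hedged usage sketch for the two timer overloads described by the stubs.
from glow import timer  # assumed re-export; implemented in glow/_profile.py

elapsed: list[int] = []

with timer('sum-1e6'):          # name form: reported via _timer_print
    sum(range(1_000_000))

with timer(elapsed.append):     # callback form: receives the elapsed count
    sum(range(1_000_000))       # (nanoseconds with the default perf_counter_ns)

print(elapsed)
```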
src/glow/_reduction.py

@@ -16,6 +16,7 @@ from pathlib import Path
 import loky
 
 from ._import_hook import when_imported
+from ._types import Callback
 
 _SYSTEM_SHM_MIN_SIZE = int(2e9)
 _SYSTEM_SHM = Path('/dev/shm')

@@ -109,7 +110,7 @@ def _torch_hook(torch) -> None:
 
 def _dumps(
     obj: object,
-    callback:
+    callback: Callback[pickle.PickleBuffer] | None = None,
 ) -> bytes:
     fp = io.BytesIO()
     p = pickle.Pickler(fp, -1, buffer_callback=callback)
src/glow/_streams.py

@@ -1,16 +1,17 @@
 __all__ = ['Stream', 'cumsum', 'maximum_cumsum']
 
 from collections import deque
-from collections.abc import Callable
 from dataclasses import dataclass
 from itertools import accumulate
 
+from ._types import Callback, Get
+
 
 @dataclass(frozen=True, slots=True, repr=False)
 class Stream[Y, S]:
     init: S
-    push:
-    pop:
+    push: Callback[S]
+    pop: Get[Y]
 
     def send(self, value: S) -> Y:
         self.push(value)
glow-0.15.6/src/glow/_types.py (new file)

@@ -0,0 +1,31 @@
+from collections.abc import (
+    AsyncIterable,
+    AsyncIterator,
+    Callable,
+    Coroutine,
+    Hashable,
+    Iterable,
+    Iterator,
+)
+from dataclasses import dataclass
+from typing import Any, Literal, Protocol
+
+type KeyFn[H: Hashable] = Callable[..., H]
+
+type Coro[T] = Coroutine[Any, Any, T]
+type AnyIterable[T] = AsyncIterable[T] | Iterable[T]
+type AnyIterator[T] = AsyncIterator[T] | Iterator[T]
+
+type Get[T] = Callable[[], T]
+type Callback[T] = Callable[[T], object]
+
+type CachePolicy = Literal['lru', 'mru'] | None
+
+
+@dataclass(frozen=True, slots=True)
+class Some[T]:
+    x: T
+
+
+class Decorator(Protocol):
+    def __call__[**P, R](self, fn: Callable[P, R], /) -> Callable[P, R]: ...
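The two aliases used most widely across this release are `Get[T]` (a zero-argument producer) and `Callback[T]` (a one-argument consumer whose return value is ignored). A self-contained illustration, with the aliases re-declared locally:

```python
# Self-contained illustration of the Get/Callback aliases (Python 3.12+).
from collections.abc import Callable
from time import perf_counter_ns

type Get[T] = Callable[[], T]             # () -> T
type Callback[T] = Callable[[T], object]  # (T) -> anything, result ignored


def measure(work: Get[object], report: Callback[int]) -> None:
    start = perf_counter_ns()
    work()
    report(perf_counter_ns() - start)  # elapsed nanoseconds


measure(lambda: sum(range(100_000)), print)
```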
src/glow/_uuid.py

@@ -7,6 +7,11 @@ from functools import lru_cache
 from typing import Self, SupportsInt
 from uuid import UUID, uuid4
 
+try:
+    from pydantic_core import core_schema
+except ImportError:
+    core_schema = None  # type: ignore[assignment]
+
 ALPHABET = string.digits + string.ascii_letters
 ALPHABET = ''.join(sorted({*ALPHABET} - {*'0O1Il'}))
 

@@ -84,7 +89,8 @@ class Uid(UUID):
 
     @classmethod  # Pydantic 2.x requirement
     def __get_pydantic_core_schema__(cls, _, handler):
-
+        if core_schema is None:
+            raise ImportError('Cannot import `pydantic_core` module')
 
         return core_schema.no_info_after_validator_function(
             cls,
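The same optional-dependency guard, generalized: import `pydantic_core` once at module load and fail only when the pydantic-specific path is actually used. Function and message names below are illustrative, not glow's:

```python
# Illustrative optional-dependency guard mirroring the diff's approach.
try:
    from pydantic_core import core_schema
except ImportError:        # pydantic_core is an optional extra
    core_schema = None     # type: ignore[assignment]


def require_core_schema():
    """Return the module, raising lazily if pydantic support is missing."""
    if core_schema is None:
        raise ImportError('Cannot import `pydantic_core` module')
    return core_schema
```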
src/glow/_wrap.py

@@ -6,6 +6,8 @@ from typing import Any, Protocol, Self
 
 from wrapt import ObjectProxy
 
+from ._types import Get
+
 
 def wrap[**P, R](func: Callable[P, R], wrapper: '_Wrapper') -> Callable[P, R]:
     return _Callable(func, wrapper)

@@ -23,7 +25,7 @@ class _Wrapper(Protocol):
     # ...
     # resume()
     # fn(*args, **kwargs)
-    def suspend(self) ->
+    def suspend(self) -> Get[None]: ...
 
     # This one start right before function was called,
     # and stops right after it returned.
src/glow/cli.py

@@ -206,9 +206,14 @@ def _visit_field(
         arg_group = parser.add_argument_group(fd.name)
         return fd.name, cls, _visit_nested(arg_group, cls, seen)
 
-
+    vtp = opts['type']
+    if (
+        isinstance(vtp, type)
+        and issubclass(vtp, Iterable)
+        and not issubclass(vtp, str)
+    ):
         msg = (
-            '
+            'Iterable value types are supported only as generics. '
             f'Got: {vtp}'
         )
         raise TypeError(msg)

@@ -292,7 +297,7 @@ def parse_args[T](
 
 
 def _import_from_string(qualname: str):
-    modname, _, attrname = qualname.partition(
+    modname, _, attrname = qualname.partition(':')
     if not modname or not attrname:
         msg = (
             f'Import string "{qualname}" must be '

@@ -300,13 +305,7 @@ def _import_from_string(qualname: str):
         )
         raise ImportError(msg)
 
-
-        mod = importlib.import_module(modname)
-    except ModuleNotFoundError as exc:
-        if exc.name != modname:
-            raise
-        msg = f'Could not import module "{modname}".'
-        raise ImportError(msg) from None
+    mod = importlib.import_module(modname)
 
     obj: Any = mod
     try:

@@ -314,7 +313,7 @@ def _import_from_string(qualname: str):
             obj = getattr(obj, a)
     except AttributeError:
         msg = f'Attribute "{attrname}" not found in module "{modname}".'
-        raise AttributeError(msg)
+        raise AttributeError(msg) from None
     return obj
 
 
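After this change the import string uses `:` to separate the module from the attribute path. A standalone sketch of the same parsing logic, since `_import_from_string` is private to `glow.cli`:

```python
# Standalone sketch of the "module:attr" convention parsed by _import_from_string.
import importlib


def import_from_string(qualname: str):
    modname, _, attrname = qualname.partition(':')
    if not modname or not attrname:
        raise ImportError(f'Import string "{qualname}" must be "module:attr"')
    obj = importlib.import_module(modname)
    for part in attrname.split('.'):
        obj = getattr(obj, part)
    return obj


print(import_from_string('math:pi'))       # 3.141592653589793
print(import_from_string('os.path:join'))  # <function join ...>
```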
src/glow/cli.pyi

@@ -2,6 +2,8 @@ from argparse import ArgumentParser
 from collections.abc import Callable, Mapping, Sequence
 from typing import Any, overload
 
+from ._types import Get
+
 @overload
 def arg[T](
     default: T,

@@ -18,7 +20,7 @@ def arg[T](
 @overload
 def arg[T](
     *,
-    factory:
+    factory: Get[T],
     flag: str = ...,
     init: bool = ...,
     repr: bool = ...,
src/glow/io/_sound.py

@@ -13,27 +13,39 @@ from tqdm.auto import tqdm
 
 from .. import chunked
 
+try:
+    from sounddevice import CallbackAbort, CallbackStop, OutputStream
+except ImportError:
+    OutputStream = None
+    CallbackAbort = CallbackStop = Exception
+
+try:
+    import soundfile
+except ImportError:
+    soundfile = None
+
 
 def _play(
     arr: np.ndarray, rate: int, blocksize: int = 1024, bufsize: int = 20
 ) -> None:
     """Play audio from array. Crtl-C to interrupt."""
-
+    if OutputStream is None:
+        raise ImportError('Cannot import `sounddevice` module')
 
     q: Queue[np.ndarray | None] = Queue(bufsize)
     ev = Event()
 
     def callback(out: np.ndarray, *_) -> None:
         if (data := q.get()) is None:
-            raise
+            raise CallbackAbort
 
         size = len(data)
         out[:size] = data
         if size < len(out):
             out[size:] = 0
-            raise
+            raise CallbackStop
 
-    stream =
+    stream = OutputStream(
         rate, blocksize, callback=callback, finished_callback=ev.set
     )
 

@@ -127,16 +139,16 @@ class Sound[S: np.number]:
 
     @classmethod
     def load(cls, path: Path | str) -> 'Sound':
+        if soundfile is None:
+            raise ImportError('Cannot import `soundfile` module')
         _check_fmt(path)
-        import soundfile
-
         data, rate = soundfile.read(path)
         return cls(data.astype('f'), rate)
 
     def save(self, path: Path | str) -> None:
+        if soundfile is None:
+            raise ImportError('Cannot import `soundfile` module')
         _check_fmt(path)
-        import soundfile
-
         soundfile.write(path, self.data, self.rate)
 
 
test/test_cli.py

@@ -1,5 +1,6 @@
 from collections.abc import Callable
 from dataclasses import dataclass
+from pathlib import Path
 from typing import Any, Literal
 
 import pytest

@@ -75,6 +76,11 @@ class NestedAliased: # Forbidden as all field names must be unique
     nested: Aliased
 
 
+@dataclass
+class Custom:
+    arg: Path
+
+
 @pytest.mark.parametrize(
     ('argv', 'expected'),
     [

@@ -91,6 +97,7 @@ class NestedAliased: # Forbidden as all field names must be unique
         (['--param', 'world'], Optional_('world')),
         (['value'], Nested('value', Optional_())),
         (['value', '--param', 'pvalue'], Nested('value', Optional_('pvalue'))),
+        (['test.txt'], Custom(Path('test.txt'))),
     ],
 )
 def test_good_class(argv: list[str], expected: Any):
glow-0.15.5/src/glow/_types.py (DELETED)

@@ -1,53 +0,0 @@
-import asyncio
-import concurrent.futures as cf
-from collections.abc import (
-    AsyncIterable,
-    AsyncIterator,
-    Callable,
-    Coroutine,
-    Hashable,
-    Iterable,
-    Iterator,
-    Sequence,
-)
-from dataclasses import dataclass
-from typing import Any, Literal, Protocol, overload
-
-type KeyFn[H: Hashable] = Callable[..., H]
-
-type Coro[T] = Coroutine[Any, Any, T]
-type AnyIterable[T] = AsyncIterable[T] | Iterable[T]
-type AnyIterator[T] = AsyncIterator[T] | Iterator[T]
-
-type BatchFn[T, R] = Callable[[Sequence[T]], Sequence[R]]
-type ABatchFn[T, R] = Callable[[Sequence[T]], Coro[Sequence[R]]]
-
-type AnyFuture[R] = cf.Future[R] | asyncio.Future[R]
-type Job[T, R] = tuple[T, AnyFuture[R]]
-
-type Get[T] = Callable[[], T]
-type Callback[T] = Callable[[T], object]
-
-type CachePolicy = Literal['lru', 'mru'] | None
-
-
-@dataclass(frozen=True, slots=True)
-class Some[T]:
-    x: T
-
-
-class Decorator(Protocol):
-    def __call__[**P, R](self, fn: Callable[P, R], /) -> Callable[P, R]: ...
-
-
-class BatchDecorator(Protocol):
-    def __call__[T, R](self, fn: BatchFn[T, R], /) -> BatchFn[T, R]: ...
-class ABatchDecorator(Protocol):
-    def __call__[T, R](self, fn: ABatchFn[T, R], /) -> ABatchFn[T, R]: ...
-
-
-class AnyBatchDecorator(Protocol):
-    @overload
-    def __call__[T, R](self, fn: BatchFn[T, R], /) -> BatchFn[T, R]: ...
-    @overload
-    def __call__[T, R](self, fn: ABatchFn[T, R], /) -> ABatchFn[T, R]: ...