ez-a-sync 0.32.29__cp310-cp310-win32.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ez-a-sync might be problematic. Click here for more details.
- a_sync/ENVIRONMENT_VARIABLES.py +42 -0
- a_sync/__init__.pxd +2 -0
- a_sync/__init__.py +145 -0
- a_sync/_smart.c +22803 -0
- a_sync/_smart.cp310-win32.pyd +0 -0
- a_sync/_smart.pxd +2 -0
- a_sync/_smart.pyi +202 -0
- a_sync/_smart.pyx +674 -0
- a_sync/_typing.py +258 -0
- a_sync/a_sync/__init__.py +60 -0
- a_sync/a_sync/_descriptor.c +20528 -0
- a_sync/a_sync/_descriptor.cp310-win32.pyd +0 -0
- a_sync/a_sync/_descriptor.pyi +33 -0
- a_sync/a_sync/_descriptor.pyx +422 -0
- a_sync/a_sync/_flags.c +6074 -0
- a_sync/a_sync/_flags.cp310-win32.pyd +0 -0
- a_sync/a_sync/_flags.pxd +3 -0
- a_sync/a_sync/_flags.pyx +92 -0
- a_sync/a_sync/_helpers.c +14521 -0
- a_sync/a_sync/_helpers.cp310-win32.pyd +0 -0
- a_sync/a_sync/_helpers.pxd +3 -0
- a_sync/a_sync/_helpers.pyi +10 -0
- a_sync/a_sync/_helpers.pyx +167 -0
- a_sync/a_sync/_kwargs.c +12194 -0
- a_sync/a_sync/_kwargs.cp310-win32.pyd +0 -0
- a_sync/a_sync/_kwargs.pxd +2 -0
- a_sync/a_sync/_kwargs.pyx +64 -0
- a_sync/a_sync/_meta.py +210 -0
- a_sync/a_sync/abstract.c +12411 -0
- a_sync/a_sync/abstract.cp310-win32.pyd +0 -0
- a_sync/a_sync/abstract.pyi +141 -0
- a_sync/a_sync/abstract.pyx +221 -0
- a_sync/a_sync/base.c +14932 -0
- a_sync/a_sync/base.cp310-win32.pyd +0 -0
- a_sync/a_sync/base.pyi +60 -0
- a_sync/a_sync/base.pyx +271 -0
- a_sync/a_sync/config.py +168 -0
- a_sync/a_sync/decorator.py +651 -0
- a_sync/a_sync/flags.c +5272 -0
- a_sync/a_sync/flags.cp310-win32.pyd +0 -0
- a_sync/a_sync/flags.pxd +72 -0
- a_sync/a_sync/flags.pyi +74 -0
- a_sync/a_sync/flags.pyx +72 -0
- a_sync/a_sync/function.c +37846 -0
- a_sync/a_sync/function.cp310-win32.pyd +0 -0
- a_sync/a_sync/function.pxd +28 -0
- a_sync/a_sync/function.pyi +571 -0
- a_sync/a_sync/function.pyx +1381 -0
- a_sync/a_sync/method.c +29774 -0
- a_sync/a_sync/method.cp310-win32.pyd +0 -0
- a_sync/a_sync/method.pxd +9 -0
- a_sync/a_sync/method.pyi +525 -0
- a_sync/a_sync/method.pyx +1023 -0
- a_sync/a_sync/modifiers/__init__.pxd +1 -0
- a_sync/a_sync/modifiers/__init__.py +101 -0
- a_sync/a_sync/modifiers/cache/__init__.py +160 -0
- a_sync/a_sync/modifiers/cache/memory.py +165 -0
- a_sync/a_sync/modifiers/limiter.py +132 -0
- a_sync/a_sync/modifiers/manager.c +16149 -0
- a_sync/a_sync/modifiers/manager.cp310-win32.pyd +0 -0
- a_sync/a_sync/modifiers/manager.pxd +5 -0
- a_sync/a_sync/modifiers/manager.pyi +219 -0
- a_sync/a_sync/modifiers/manager.pyx +299 -0
- a_sync/a_sync/modifiers/semaphores.py +173 -0
- a_sync/a_sync/property.c +27260 -0
- a_sync/a_sync/property.cp310-win32.pyd +0 -0
- a_sync/a_sync/property.pyi +376 -0
- a_sync/a_sync/property.pyx +819 -0
- a_sync/a_sync/singleton.py +63 -0
- a_sync/aliases.py +3 -0
- a_sync/async_property/__init__.pxd +1 -0
- a_sync/async_property/__init__.py +1 -0
- a_sync/async_property/cached.c +20386 -0
- a_sync/async_property/cached.cp310-win32.pyd +0 -0
- a_sync/async_property/cached.pxd +10 -0
- a_sync/async_property/cached.pyi +45 -0
- a_sync/async_property/cached.pyx +178 -0
- a_sync/async_property/proxy.c +34654 -0
- a_sync/async_property/proxy.cp310-win32.pyd +0 -0
- a_sync/async_property/proxy.pxd +2 -0
- a_sync/async_property/proxy.pyi +124 -0
- a_sync/async_property/proxy.pyx +474 -0
- a_sync/asyncio/__init__.pxd +6 -0
- a_sync/asyncio/__init__.py +164 -0
- a_sync/asyncio/as_completed.c +18841 -0
- a_sync/asyncio/as_completed.cp310-win32.pyd +0 -0
- a_sync/asyncio/as_completed.pxd +8 -0
- a_sync/asyncio/as_completed.pyi +109 -0
- a_sync/asyncio/as_completed.pyx +269 -0
- a_sync/asyncio/create_task.c +15902 -0
- a_sync/asyncio/create_task.cp310-win32.pyd +0 -0
- a_sync/asyncio/create_task.pxd +2 -0
- a_sync/asyncio/create_task.pyi +51 -0
- a_sync/asyncio/create_task.pyx +271 -0
- a_sync/asyncio/gather.c +16679 -0
- a_sync/asyncio/gather.cp310-win32.pyd +0 -0
- a_sync/asyncio/gather.pyi +107 -0
- a_sync/asyncio/gather.pyx +218 -0
- a_sync/asyncio/igather.c +12676 -0
- a_sync/asyncio/igather.cp310-win32.pyd +0 -0
- a_sync/asyncio/igather.pxd +1 -0
- a_sync/asyncio/igather.pyi +7 -0
- a_sync/asyncio/igather.pyx +182 -0
- a_sync/asyncio/sleep.c +9593 -0
- a_sync/asyncio/sleep.cp310-win32.pyd +0 -0
- a_sync/asyncio/sleep.pyi +14 -0
- a_sync/asyncio/sleep.pyx +49 -0
- a_sync/debugging.c +15362 -0
- a_sync/debugging.cp310-win32.pyd +0 -0
- a_sync/debugging.pyi +76 -0
- a_sync/debugging.pyx +107 -0
- a_sync/exceptions.c +13312 -0
- a_sync/exceptions.cp310-win32.pyd +0 -0
- a_sync/exceptions.pyi +376 -0
- a_sync/exceptions.pyx +446 -0
- a_sync/executor.py +619 -0
- a_sync/functools.c +12738 -0
- a_sync/functools.cp310-win32.pyd +0 -0
- a_sync/functools.pxd +7 -0
- a_sync/functools.pyi +33 -0
- a_sync/functools.pyx +139 -0
- a_sync/future.py +1497 -0
- a_sync/iter.c +37271 -0
- a_sync/iter.cp310-win32.pyd +0 -0
- a_sync/iter.pxd +11 -0
- a_sync/iter.pyi +370 -0
- a_sync/iter.pyx +981 -0
- a_sync/primitives/__init__.pxd +1 -0
- a_sync/primitives/__init__.py +53 -0
- a_sync/primitives/_debug.c +15757 -0
- a_sync/primitives/_debug.cp310-win32.pyd +0 -0
- a_sync/primitives/_debug.pxd +12 -0
- a_sync/primitives/_debug.pyi +52 -0
- a_sync/primitives/_debug.pyx +223 -0
- a_sync/primitives/_loggable.c +11529 -0
- a_sync/primitives/_loggable.cp310-win32.pyd +0 -0
- a_sync/primitives/_loggable.pxd +4 -0
- a_sync/primitives/_loggable.pyi +66 -0
- a_sync/primitives/_loggable.pyx +102 -0
- a_sync/primitives/locks/__init__.pxd +8 -0
- a_sync/primitives/locks/__init__.py +17 -0
- a_sync/primitives/locks/counter.c +17679 -0
- a_sync/primitives/locks/counter.cp310-win32.pyd +0 -0
- a_sync/primitives/locks/counter.pxd +12 -0
- a_sync/primitives/locks/counter.pyi +151 -0
- a_sync/primitives/locks/counter.pyx +260 -0
- a_sync/primitives/locks/event.c +17063 -0
- a_sync/primitives/locks/event.cp310-win32.pyd +0 -0
- a_sync/primitives/locks/event.pxd +22 -0
- a_sync/primitives/locks/event.pyi +43 -0
- a_sync/primitives/locks/event.pyx +185 -0
- a_sync/primitives/locks/prio_semaphore.c +25590 -0
- a_sync/primitives/locks/prio_semaphore.cp310-win32.pyd +0 -0
- a_sync/primitives/locks/prio_semaphore.pxd +25 -0
- a_sync/primitives/locks/prio_semaphore.pyi +217 -0
- a_sync/primitives/locks/prio_semaphore.pyx +597 -0
- a_sync/primitives/locks/semaphore.c +26509 -0
- a_sync/primitives/locks/semaphore.cp310-win32.pyd +0 -0
- a_sync/primitives/locks/semaphore.pxd +21 -0
- a_sync/primitives/locks/semaphore.pyi +197 -0
- a_sync/primitives/locks/semaphore.pyx +454 -0
- a_sync/primitives/queue.py +1022 -0
- a_sync/py.typed +0 -0
- a_sync/sphinx/__init__.py +3 -0
- a_sync/sphinx/ext.py +289 -0
- a_sync/task.py +932 -0
- a_sync/utils/__init__.py +105 -0
- a_sync/utils/iterators.py +297 -0
- a_sync/utils/repr.c +15799 -0
- a_sync/utils/repr.cp310-win32.pyd +0 -0
- a_sync/utils/repr.pyi +2 -0
- a_sync/utils/repr.pyx +73 -0
- ez_a_sync-0.32.29.dist-info/METADATA +367 -0
- ez_a_sync-0.32.29.dist-info/RECORD +177 -0
- ez_a_sync-0.32.29.dist-info/WHEEL +5 -0
- ez_a_sync-0.32.29.dist-info/licenses/LICENSE.txt +17 -0
- ez_a_sync-0.32.29.dist-info/top_level.txt +1 -0
a_sync/task.py
ADDED
|
@@ -0,0 +1,932 @@
|
|
|
1
|
+
"""
|
|
2
|
+
This module provides asynchronous task management utilities, specifically focused on creating and handling mappings of tasks.
|
|
3
|
+
|
|
4
|
+
The main components include:
|
|
5
|
+
- TaskMapping: A class for managing and asynchronously generating tasks based on input iterables.
|
|
6
|
+
- TaskMappingKeys: A view to asynchronously iterate over the keys of a TaskMapping.
|
|
7
|
+
- TaskMappingValues: A view to asynchronously iterate over the values of a TaskMapping.
|
|
8
|
+
- TaskMappingItems: A view to asynchronously iterate over the items (key-value pairs) of a TaskMapping.
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
from asyncio import FIRST_COMPLETED, Future, Task, sleep, wait
|
|
12
|
+
from functools import wraps
|
|
13
|
+
from inspect import getfullargspec, isawaitable
|
|
14
|
+
from itertools import filterfalse
|
|
15
|
+
from logging import getLogger
|
|
16
|
+
from weakref import WeakKeyDictionary, proxy
|
|
17
|
+
|
|
18
|
+
from a_sync import exceptions
|
|
19
|
+
from a_sync._typing import *
|
|
20
|
+
from a_sync.a_sync._kwargs import _get_flag_name
|
|
21
|
+
from a_sync.a_sync.base import ASyncGenericBase
|
|
22
|
+
from a_sync.a_sync.function import ASyncFunction
|
|
23
|
+
from a_sync.a_sync.method import (
|
|
24
|
+
ASyncBoundMethod,
|
|
25
|
+
ASyncMethodDescriptor,
|
|
26
|
+
ASyncMethodDescriptorSyncDefault,
|
|
27
|
+
)
|
|
28
|
+
from a_sync.a_sync.property import _ASyncPropertyDescriptorBase
|
|
29
|
+
from a_sync.asyncio import as_completed, create_task, gather
|
|
30
|
+
from a_sync.asyncio.gather import Excluder
|
|
31
|
+
from a_sync.asyncio.sleep import sleep0 as yield_to_loop
|
|
32
|
+
from a_sync.functools import cached_property_unsafe
|
|
33
|
+
from a_sync.iter import ASyncIterator, ASyncGeneratorFunction, ASyncSorter
|
|
34
|
+
from a_sync.primitives.locks import Event
|
|
35
|
+
from a_sync.primitives.queue import Queue, ProcessingQueue
|
|
36
|
+
from a_sync.utils.iterators import as_yielded, exhaust_iterator
|
|
37
|
+
from a_sync.utils.repr import repr_trunc
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
logger = getLogger(__name__)
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
MappingFn = Callable[Concatenate[K, P], Awaitable[V]]
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
_args = WeakKeyDictionary()
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
class TaskMapping(DefaultDict[K, "Task[V]"], AsyncIterable[Tuple[K, V]]):
|
|
50
|
+
"""
|
|
51
|
+
A mapping of keys to asynchronous tasks with additional functionality.
|
|
52
|
+
|
|
53
|
+
`TaskMapping` is a specialized dictionary that maps keys to `asyncio` Tasks. It provides
|
|
54
|
+
convenient methods for creating, managing, and iterating over these tasks asynchronously.
|
|
55
|
+
|
|
56
|
+
Tasks are created automatically for each key using a provided function. You cannot manually set items in a `TaskMapping` using dictionary-like syntax.
|
|
57
|
+
|
|
58
|
+
Example:
|
|
59
|
+
>>> async def fetch_data(url: str) -> str:
|
|
60
|
+
... async with aiohttp.ClientSession() as session:
|
|
61
|
+
... async with session.get(url) as response:
|
|
62
|
+
... return await response.text()
|
|
63
|
+
...
|
|
64
|
+
>>> tasks = TaskMapping(fetch_data, ['http://example.com', 'https://www.python.org'], name='url_fetcher', concurrency=5)
|
|
65
|
+
>>> async for key, result in tasks:
|
|
66
|
+
... print(f"Data for {key}: {result}")
|
|
67
|
+
...
|
|
68
|
+
Data for python.org: http://python.org
|
|
69
|
+
Data for example.com: http://example.com
|
|
70
|
+
|
|
71
|
+
Note:
|
|
72
|
+
You cannot manually set items in a `TaskMapping` using dictionary-like syntax. Tasks are created and managed internally.
|
|
73
|
+
|
|
74
|
+
See Also:
|
|
75
|
+
- :class:`asyncio.Task`
|
|
76
|
+
- :func:`asyncio.create_task`
|
|
77
|
+
- :func:`a_sync.asyncio.create_task`
|
|
78
|
+
"""
|
|
79
|
+
|
|
80
|
+
concurrency: Optional[int] = None
|
|
81
|
+
"The max number of tasks that will run at one time."
|
|
82
|
+
|
|
83
|
+
_destroyed: bool = False
|
|
84
|
+
"Boolean indicating whether his mapping has been consumed and is no longer usable for aggregations."
|
|
85
|
+
|
|
86
|
+
_init_loader: Optional["Task[None]"] = None
|
|
87
|
+
"An asyncio Task used to preload values from the iterables."
|
|
88
|
+
|
|
89
|
+
_init_loader_next: Optional[Callable[[], Awaitable[Tuple[Tuple[K, "Task[V]"]]]]] = None
|
|
90
|
+
"A coro function that blocks until the _init_loader starts a new task(s), and then returns a `Tuple[Tuple[K, Task[V]]]` with all of the new tasks and the keys that started them."
|
|
91
|
+
|
|
92
|
+
_name: Optional[str] = None
|
|
93
|
+
"Optional name for tasks created by this mapping."
|
|
94
|
+
|
|
95
|
+
_next: Event = None
|
|
96
|
+
"An asyncio Event that indicates the next result is ready"
|
|
97
|
+
|
|
98
|
+
_wrapped_func_kwargs: Dict[str, Any] = {}
|
|
99
|
+
"Additional keyword arguments passed to `_wrapped_func`."
|
|
100
|
+
|
|
101
|
+
__iterables__: Tuple[AnyIterableOrAwaitableIterable[K], ...] = ()
|
|
102
|
+
"The original iterables, if any, used to initialize this mapping."
|
|
103
|
+
|
|
104
|
+
__init_loader_coro: Optional[Awaitable[None]] = None
|
|
105
|
+
"""An optional asyncio Coroutine to be run by the `_init_loader`"""
|
|
106
|
+
|
|
107
|
+
__slots__ = "_wrapped_func", "__wrapped__", "__dict__", "__weakref__"
|
|
108
|
+
|
|
109
|
+
# NOTE: maybe since we use so many classvars here we are better off getting rid of slots
|
|
110
|
+
def __init__(
    self,
    wrapped_func: MappingFn[K, P, V] = None,
    *iterables: AnyIterableOrAwaitableIterable[K],
    name: str = "",
    concurrency: Optional[int] = None,
    **wrapped_func_kwargs: P.kwargs,
) -> None:
    """
    Initialize a TaskMapping instance.

    Args:
        wrapped_func: A callable that takes a key and additional parameters and returns an Awaitable.
        *iterables: Any number of iterables whose elements will be used as keys for task generation.
        name: An optional name for the tasks created by this mapping.
        concurrency: Maximum number of tasks to run concurrently.
        **wrapped_func_kwargs: Additional keyword arguments to be passed to wrapped_func.

    Example:
        async def process_item(item: int) -> int:
            await asyncio.sleep(1)
            return item * 2

        task_map = TaskMapping(process_item, [1, 2, 3], concurrency=2)
    """

    # Only shadow the class-level default (None) when a limit was actually given.
    if concurrency:
        self.concurrency = concurrency

    self.__wrapped__ = wrapped_func
    "The original callable used to initialize this mapping without any modifications."

    # Only shadow the class-level default (empty tuple) when iterables were given.
    if iterables:
        self.__iterables__ = iterables

    # Unwrap a_sync descriptors/wrappers down to the underlying coroutine function.
    wrapped_func = _unwrap(wrapped_func)
    self._wrapped_func = wrapped_func
    "The function used to create tasks for each key."

    # Bound a_sync methods default to async mode here unless the caller passed
    # an explicit sync/async flag in the kwargs.
    if isinstance(wrapped_func, ASyncMethodDescriptor) and not _get_flag_name(
        wrapped_func_kwargs
    ):
        wrapped_func_kwargs["sync"] = False
    if wrapped_func_kwargs:
        self._wrapped_func_kwargs = wrapped_func_kwargs

    if name:
        self._name = name

    self._next = Event(name=f"{self} `_next`")

    if iterables:

        # Bind once; these are called from the closure below on every completion.
        set_next = self._next.set
        clear_next = self._next.clear

        @wraps(wrapped_func)
        async def _wrapped_set_next(
            *args: P.args, __a_sync_recursion: int = 0, **kwargs: P.kwargs
        ) -> V:
            # Wraps the user's function so that every completion (success or
            # failure) pulses the `_next` event, waking any iterators.
            try:
                return await wrapped_func(*args, **kwargs)
            except exceptions.SyncModeInAsyncContextError as e:
                # Attach context about which callable was wrapped before re-raising.
                e.args = *e.args, f"wrapped:{self.__wrapped__}"
                raise
            except TypeError as e:
                # Only attempt the positional/keyword reshuffle below for the
                # specific "got multiple values for argument" clash, and give
                # up after a few recursive attempts.
                if (
                    args is None
                    or __a_sync_recursion > 2
                    or not (
                        str(e).startswith(wrapped_func.__name__)
                        and "got multiple values for argument" in str(e)
                    )
                ):
                    raise

                # NOTE: args ordering is clashing with provided kwargs. We can handle this in a hacky way.
                # TODO: perform this check earlier and pre-prepare the args/kwargs ordering
                try:
                    argspec = _args[self.__wrapped__]
                except KeyError:
                    # Cache the argspec per wrapped callable; WeakKeyDictionary
                    # lets the entry die with the callable.
                    argspec = _args[self.__wrapped__] = getfullargspec(self.__wrapped__).args

                new_args = list(args)
                new_kwargs = dict(kwargs)
                try:
                    # Move leading keyword args back into positional slots until
                    # the first parameter that wasn't passed by keyword.
                    for i, arg in enumerate(argspec):
                        if arg in kwargs:
                            new_args.insert(i, new_kwargs.pop(arg))
                        else:
                            break
                    return await _wrapped_set_next(
                        *new_args,
                        **new_kwargs,
                        __a_sync_recursion=__a_sync_recursion + 1,
                    )
                except TypeError as e2:
                    raise (
                        e.with_traceback(e.__traceback__)
                        if str(e2) == "unsupported callable"
                        else e2.with_traceback(e2.__traceback__)
                    )
            finally:
                # Pulse the event: wake waiters, then immediately re-arm it.
                set_next()
                clear_next()

        self._wrapped_func = _wrapped_set_next
        init_loader_queue: Queue[Tuple[K, "Future[V]"]] = Queue()
        self.__init_loader_coro = exhaust_iterator(
            self._start_tasks_for_iterables(*iterables), queue=init_loader_queue
        )
        self._init_loader_next = init_loader_queue.get_all

    try:
        # Touch the cached property to kick off the loader task eagerly.
        self._init_loader
    except _NoRunningLoop:
        # its okay if we get this exception, we can start the task as soon as the loop starts
        pass
|
|
228
|
+
|
|
229
|
+
def __repr__(self) -> str:
    """Debug representation: wrapped func, its kwargs, current task count, and identity."""
    cls_name = type(self).__name__
    location = hex(id(self))
    return f"<{cls_name} for {self._wrapped_func} kwargs={self._wrapped_func_kwargs} tasks={len(self)} at {location}>"
|
|
231
|
+
|
|
232
|
+
def __hash__(self) -> int:
    """Hash by identity so instances stay hashable despite being mutable dicts."""
    return id(self)
|
|
234
|
+
|
|
235
|
+
def __setitem__(self, item: Any, value: Any) -> None:
    """Always refuse assignment: tasks are created internally, never set by callers."""
    raise NotImplementedError("You cannot manually set items in a TaskMapping")
|
|
237
|
+
|
|
238
|
+
def __getitem__(self, item: K) -> "Task[V]":
    """Fetch the task stored under ``item``, lazily starting one if absent."""
    try:
        task = dict.__getitem__(self, item)
    except KeyError:
        # Unknown key: create and register a new task for it.
        return self.__start_task(item)
    return task
|
|
243
|
+
|
|
244
|
+
def __await__(self) -> Generator[Any, None, Dict[K, V]]:
    """Wait for all tasks to complete and return a dictionary of the results."""
    gathered = self.gather(sync=False)
    return gathered.__await__()
|
|
247
|
+
|
|
248
|
+
async def __aiter__(self, pop: bool = False) -> AsyncIterator[Tuple[K, V]]:
    # sourcery skip: hoist-loop-from-if, hoist-similar-statement-from-if, hoist-statement-from-if
    """Asynchronously iterate through all key-task pairs, yielding the key-result pair as each task completes."""

    self._if_pop_check_destroyed(pop)

    # if you inited the TaskMapping with some iterators, we will load those
    yielded = set()
    add_yielded = yielded.add
    try:
        if self._init_loader is None:
            # if you didn't init the TaskMapping with iterators and you didn't start any tasks manually, we should fail
            self._raise_if_empty()
        else:
            # While the loader is still producing keys, alternate between
            # waiting for new keys and draining whatever tasks have finished.
            while not self._init_loader.done():
                await self._wait_for_next_key()
                while unyielded := tuple(key for key in self if key not in yielded):
                    if ready := tuple(key for key in unyielded if self[key].done()):
                        if pop:
                            self_pop = self.pop
                            for key in ready:
                                yield key, self_pop(key).result()
                                add_yielded(key)
                        else:
                            for key in ready:
                                yield key, self[key].result()
                                add_yielded(key)
                    else:
                        # Nothing finished yet; sleep until a task completes.
                        await self._next.wait()
            # loader is already done by this point, but we need to check for exceptions
            await self._init_loader
        # if there are any tasks that still need to complete, we need to await them and yield them
        if unyielded := {key: self[key] for key in self if key not in yielded}:
            if pop:
                self_pop = self.pop
                async for key, value in as_completed(unyielded, aiter=True):
                    self_pop(key)
                    yield key, value
            else:
                async for key, value in as_completed(unyielded, aiter=True):
                    yield key, value
    finally:
        # When pop=True the mapping is consumed; mark it destroyed and clear it.
        await self._if_pop_clear(pop)
|
|
291
|
+
|
|
292
|
+
def __delitem__(self, item: K) -> None:
    """Remove ``item``'s task, cancelling it first when it has not finished."""
    entry = dict.__getitem__(self, item)
    # Never leave an orphaned running task behind.
    if not entry.done():
        entry.cancel()
    dict.__delitem__(self, item)
|
|
297
|
+
|
|
298
|
+
def keys(self, pop: bool = False) -> "TaskMappingKeys[K, V]":
    """Return an async-capable view over this mapping's keys."""
    view = dict.keys(self)
    return TaskMappingKeys(view, self, pop=pop)
|
|
300
|
+
|
|
301
|
+
def values(self, pop: bool = False) -> "TaskMappingValues[K, V]":
    """Return an async-capable view over this mapping's task results."""
    view = dict.values(self)
    return TaskMappingValues(view, self, pop=pop)
|
|
303
|
+
|
|
304
|
+
def items(self, pop: bool = False) -> "TaskMappingItems[K, V]":
    """Return an async-capable view over this mapping's (key, result) pairs.

    Args:
        pop: Whether items are removed from the mapping as the view yields them.
    """
    # Fixed return annotation: this returns TaskMappingItems (the annotation
    # previously said TaskMappingValues, copied from the sibling method).
    return TaskMappingItems(dict.items(self), self, pop=pop)
|
|
306
|
+
|
|
307
|
+
async def close(self) -> None:
    """Drain and discard every task, leaving the mapping destroyed."""
    await self._if_pop_clear(True)
|
|
309
|
+
|
|
310
|
+
@ASyncGeneratorFunction
async def map(
    self,
    *iterables: AnyIterableOrAwaitableIterable[K],
    pop: bool = True,
    yields: Literal["keys", "both"] = "both",
) -> AsyncIterator[Tuple[K, V]]:
    # sourcery skip: hoist-similar-statement-from-if
    """
    Asynchronously map iterables to tasks and yield their results.

    Args:
        *iterables: Iterables to map over.
        pop: Whether to remove tasks from the internal storage once they are completed.
        yields: Whether to yield 'keys', 'values', or 'both' (key-value pairs).

    Yields:
        Depending on `yields`, either keys, values,
        or tuples of key-value pairs representing the results of completed tasks.

    Example:
        async def process_item(item: int) -> int:
            await asyncio.sleep(1)
            return item * 2

        task_map = TaskMapping(process_item)
        async for key, result in task_map.map([1, 2, 3]):
            print(f"Processed {key}: {result}")
    """
    self._if_pop_check_destroyed(pop)

    # make sure the init loader is started if needed
    init_loader = self._init_loader
    if iterables and init_loader:
        # Keys may come from __init__ OR from this call, never both.
        raise ValueError(
            "You cannot pass `iterables` to map if the TaskMapping was initialized with an (a)iterable."
        )

    try:
        if iterables:
            self._raise_if_not_empty()

            def callback(t: Task):
                # Wake the drain loop below whenever any task completes.
                self._next.set()

            try:
                async for k, t in self._tasks_for_iterables(*iterables):
                    t.add_done_callback(callback)
                    if self._next.is_set():
                        # Some task(s) finished while we were starting new ones;
                        # yield them now so results stream out as soon as ready.
                        async for key, value in self.yield_completed(pop=pop):
                            yield _yield(key, value, yields)
                        self._next.clear()
            except _EmptySequenceError:
                if len(iterables) > 1:
                    # TODO gotta handle this situation
                    raise exceptions.EmptySequenceError(
                        "bob needs to code something so you can do this, go tell him"
                    ) from None
                # just pass thru
            finally:
                self._next.clear()

        elif init_loader:
            # check for exceptions if you passed an iterable(s) into the class init
            await init_loader

        else:
            self._raise_if_empty(
                "You must either initialize your TaskMapping with an iterable(s) or provide them during your call to map"
            )

        # Drain whatever tasks remain, in completion order.
        if self:
            if pop:
                self_pop = self.pop
                async for key, value in as_completed(self, aiter=True):
                    self_pop(key)
                    yield _yield(key, value, yields)
            else:
                async for key, value in as_completed(self, aiter=True):
                    yield _yield(key, value, yields)
    finally:
        await self._if_pop_clear(pop)
|
|
392
|
+
|
|
393
|
+
@ASyncMethodDescriptorSyncDefault
async def all(self, pop: bool = True) -> bool:
    """Return True when every task result is truthy (and for an empty sequence)."""
    try:
        async for _, result in self.__aiter__(pop=pop):
            if not result:
                return False
    except _EmptySequenceError:
        return True
    else:
        return True
    finally:
        await self._if_pop_clear(pop)
|
|
404
|
+
|
|
405
|
+
@ASyncMethodDescriptorSyncDefault
async def any(self, pop: bool = True) -> bool:
    """Return True when at least one task result is truthy (False for an empty sequence)."""
    try:
        async for _, result in self.__aiter__(pop=pop):
            if result:
                return True
    except _EmptySequenceError:
        return False
    else:
        return False
    finally:
        await self._if_pop_clear(pop)
|
|
416
|
+
|
|
417
|
+
@ASyncMethodDescriptorSyncDefault
async def max(self, pop: bool = True) -> V:
    """Return the largest result produced by the tasks in this mapping.

    Raises:
        exceptions.EmptySequenceError: If there are no tasks/results to compare.
    """
    largest = None
    try:
        async for _, result in self.__aiter__(pop=pop):
            if largest is None or result > largest:
                largest = result
    except _EmptySequenceError:
        raise exceptions.EmptySequenceError("max() arg is an empty sequence") from None
    if largest is None:
        raise exceptions.EmptySequenceError("max() arg is an empty sequence") from None
    return largest
|
|
430
|
+
|
|
431
|
+
@ASyncMethodDescriptorSyncDefault
async def min(self, pop: bool = True) -> V:
    """Return the minimum result from the tasks in the mapping.

    Raises:
        exceptions.EmptySequenceError: If there are no tasks/results to compare.
    """
    smallest = None
    try:
        async for _, result in self.__aiter__(pop=pop):
            if smallest is None or result < smallest:
                smallest = result
    except _EmptySequenceError:
        raise exceptions.EmptySequenceError("min() arg is an empty sequence") from None
    if smallest is None:
        raise exceptions.EmptySequenceError("min() arg is an empty sequence") from None
    return smallest
|
|
445
|
+
|
|
446
|
+
@ASyncMethodDescriptorSyncDefault
async def sum(self, pop: bool = False) -> V:
    """Return the sum of the results from the tasks in the mapping (0 when empty)."""
    total = 0
    try:
        async for _, result in self.__aiter__(pop=pop):
            total += result
    except _EmptySequenceError:
        return 0
    return total
|
|
456
|
+
|
|
457
|
+
@ASyncIterator.wrap
async def yield_completed(self, pop: bool = True) -> AsyncIterator[Tuple[K, V]]:
    """
    Asynchronously yield (key, result) pairs for every task that has already finished.

    Args:
        pop: Whether to remove tasks from the internal storage once they are completed.

    Yields:
        Tuples of key-value pairs representing the results of completed tasks.

    Example:
        async def process_item(item: int) -> int:
            await asyncio.sleep(1)
            return item * 2

        task_map = TaskMapping(process_item, [1, 2, 3])
        async for key, result in task_map.yield_completed():
            print(f"Completed {key}: {result}")
    """
    if pop:
        # Snapshot the finished keys first so popping cannot disturb iteration.
        finished = tuple(k for k, task in dict.items(self) if task.done())
        remove = self.pop
        for k in finished:
            yield k, remove(k).result()
    else:
        for k, task in dict.items(self):
            if task.done():
                yield k, task.result()
|
|
486
|
+
|
|
487
|
+
@ASyncMethodDescriptorSyncDefault
async def gather(
    self,
    return_exceptions: bool = False,
    exclude_if: Excluder[V] = None,
    tqdm: bool = False,
    **tqdm_kwargs: Any,
) -> Dict[K, V]:
    """Wait for all tasks to complete and return a dictionary of the results.

    Args:
        return_exceptions: If True, exceptions appear in the result dict instead of raising.
        exclude_if: Optional predicate; results it matches are omitted from the dict.
        tqdm: If True, show a progress bar while waiting.
        **tqdm_kwargs: Extra options forwarded to the progress bar.
    """
    if self._init_loader:
        # Ensure every key from the init iterables has a task before gathering.
        await self._init_loader
    self._raise_if_empty()
    results = await gather(
        self,
        return_exceptions=return_exceptions,
        exclude_if=exclude_if,
        tqdm=tqdm,
        **tqdm_kwargs,
    )
    return results
|
|
506
|
+
|
|
507
|
+
@overload
def pop(self, item: K, *, cancel: bool = False) -> "Union[Task[V], Future[V]]":
    """Pop a task from the TaskMapping.

    Args:
        item: The key to pop.
        cancel: Whether to cancel the task when popping it.
    """

@overload
def pop(self, item: K, default: K, *, cancel: bool = False) -> "Union[Task[V], Future[V]]":
    """Pop a task from the TaskMapping.

    Args:
        item: The key to pop.
        default: The default value to return if no matching key is found.
        cancel: Whether to cancel the task when popping it.
    """

def pop(self, *args: K, cancel: bool = False) -> "Union[Task[V], Future[V]]":
    """Remove and return the task stored under the given key.

    Args:
        *args: One key to pop, optionally followed by a default value.
        cancel: Whether to cancel the task when popping it.
    """
    popped = dict.pop(self, *args)
    if cancel:
        popped.cancel()
    return popped
|
|
537
|
+
|
|
538
|
+
def clear(self, cancel: bool = False) -> None:
    """Remove every task from the mapping.

    Args:
        cancel: If True, also cancel the init loader and each popped task.
    """
    # NOTE: `self._init_loader` is only evaluated when `cancel` is truthy,
    # because accessing the cached property can start the loader task.
    if cancel and self._init_loader and not self._init_loader.done():
        logger.debug("cancelling %s", self._init_loader)
        self._init_loader.cancel()
    remaining = tuple(self.keys())
    if remaining:
        logger.debug("popping remaining %s tasks", self)
        remove = self.pop
        for key in remaining:
            remove(key, cancel=cancel)
|
|
548
|
+
|
|
549
|
+
@cached_property_unsafe
def _init_loader(self) -> Optional["Task[None]"]:
    # sourcery skip: raise-from-previous-error
    """Start (at most once, via the cached property) the background task that
    exhausts ``__iterables__`` and schedules a task for every key it yields.

    Returns:
        The init-loader Task, or None when no iterables were provided at init.

    Raises:
        _NoRunningLoop: when no event loop is running yet; __init__ catches
            this and retries once a loop exists.
    """
    if self.__init_loader_coro is None:
        # No iterables were passed to __init__, so there is nothing to preload.
        return None

    logger.debug("starting %s init loader", self)
    # Build a truncated repr of the iterable(s) for the task name.
    if len(iterables := self.__iterables__) == 1:
        iterables_repr = repr_trunc(iterables[0])
    else:
        iterables_repr = f"({', '.join(map(repr_trunc, iterables))})"
    try:
        task = create_task(
            coro=self.__init_loader_coro,
            name=f"{type(self).__name__} init loader loading {iterables_repr} for {self}",
        )
    except RuntimeError as e:
        # Translate asyncio's "no running event loop" into our sentinel type so
        # callers can distinguish it from other RuntimeErrors.
        raise _NoRunningLoop if str(e) == "no running event loop" else e
    # Let the cleanup callback observe completion/failure of the loader.
    task.add_done_callback(self.__cleanup)
    return task
|
|
569
|
+
|
|
570
|
+
@cached_property_unsafe
def _queue(self) -> ProcessingQueue:
    """Lazily-built queue that runs ``_wrapped_func`` per key with bounded concurrency."""

    def _work(arg):
        return self._wrapped_func(arg, **self._wrapped_func_kwargs)

    return ProcessingQueue(_work, self.concurrency, name=self._name)
|
|
574
|
+
|
|
575
|
+
def _raise_if_empty(self, msg: str = "") -> None:
    """Raise MappingIsEmptyError (with an optional message) when no tasks exist."""
    if self:
        return
    raise exceptions.MappingIsEmptyError(self, msg)
|
|
578
|
+
|
|
579
|
+
def _raise_if_not_empty(self) -> None:
    """Raise MappingNotEmptyError when the mapping already holds tasks."""
    if not self:
        return
    raise exceptions.MappingNotEmptyError(self)
|
|
582
|
+
|
|
583
|
+
@ASyncGeneratorFunction
async def _tasks_for_iterables(
    self, *iterables: AnyIterableOrAwaitableIterable[K]
) -> AsyncIterator[Tuple[K, "Task[V]"]]:
    """Ensure tasks are running for each key in the provided iterables."""
    # if we have any regular containers we can yield their contents right away
    containers = tuple(
        iterable
        for iterable in iterables
        if not isinstance(iterable, AsyncIterable) and isinstance(iterable, Iterable)
    )
    for iterable in containers:
        async for key in _yield_keys(iterable):
            # Indexing starts the task on first access (see __getitem__).
            yield key, self[key]

    # Async iterables are merged and consumed concurrently via as_yielded.
    if remaining := tuple(iterable for iterable in iterables if iterable not in containers):
        try:
            async for key in as_yielded(*(_yield_keys(iterable) for iterable in remaining)):  # type: ignore [attr-defined]
                yield key, self[key]  # ensure task is running
        except _EmptySequenceError:
            if len(iterables) == 1:
                raise
            raise RuntimeError("DEV: figure out how to handle this situation") from None
|
|
606
|
+
|
|
607
|
+
@ASyncGeneratorFunction
async def _start_tasks_for_iterables(
    self, *iterables: AnyIterableOrAwaitableIterable[K]
) -> AsyncIterator[Tuple[K, "Task[V]"]]:
    """Start new tasks for each key in the provided iterables.

    Yields ``(key, future)`` pairs; each key is passed through
    ``__start_task``, which unconditionally creates and registers a new
    future. Synchronous containers are drained first, then any async or
    awaitable iterables are merged via ``as_yielded``.
    """
    # if we have any regular containers we can yield their contents right away
    containers = tuple(
        iterable
        for iterable in iterables
        if not isinstance(iterable, AsyncIterable) and isinstance(iterable, Iterable)
    )
    for iterable in containers:
        async for key in _yield_keys(iterable):
            yield key, self.__start_task(key)

    # NOTE(review): `not in containers` compares by equality — see the same
    # note on `_tasks_for_iterables`; confirm callers pass distinct objects.
    if remaining := tuple(iterable for iterable in iterables if iterable not in containers):
        try:
            async for key in as_yielded(*(_yield_keys(iterable) for iterable in remaining)):  # type: ignore [attr-defined]
                yield key, self.__start_task(key)
        except _EmptySequenceError:
            if len(iterables) == 1:
                raise
            raise RuntimeError("DEV: figure out how to handle this situation") from None
|
|
630
|
+
|
|
631
|
+
def _if_pop_check_destroyed(self, pop: bool) -> None:
|
|
632
|
+
if pop:
|
|
633
|
+
if self._destroyed:
|
|
634
|
+
raise RuntimeError(f"{self} has already been consumed")
|
|
635
|
+
self._destroyed = True
|
|
636
|
+
|
|
637
|
+
async def _if_pop_clear(self, pop: bool) -> None:
    """When ``pop`` is True: mark self destroyed, tear down the processing
    queue if one was ever created, cancel-and-clear all tasks, then yield to
    the event loop so cancellations can propagate. No-op otherwise."""
    if pop:
        self._destroyed = True
        # _queue is a cached_property, we don't want to create it if it doesn't exist
        if self.concurrency and "_queue" in self.__dict__:
            self._queue.close()
            del self._queue
        self.clear(cancel=True)
        # we need to let the loop run once so the tasks can fully cancel
        await yield_to_loop()
|
|
647
|
+
|
|
648
|
+
async def _wait_for_next_key(self) -> None:
    """Block until the init loader produces at least one new key or finishes.

    Races a fresh ``_init_loader_next()`` task against ``_init_loader``
    itself; whichever finishes first ends the wait. ``task.result()``
    re-raises any exception from the completed task(s).
    """
    # NOTE if `_init_loader` has an exception it will return first, otherwise `_init_loader_next` will return always
    done, pending = await wait(
        (
            create_task(self._init_loader_next(), log_destroy_pending=False),
            self._init_loader,
        ),
        return_when=FIRST_COMPLETED,
    )
    task: Task
    # NOTE(review): the still-pending task is not cancelled here;
    # `log_destroy_pending=False` suggests that is deliberate — confirm.
    for task in done:
        # check for exceptions
        task.result()
|
|
661
|
+
|
|
662
|
+
def __start_task(self, item: K) -> "Future[V]":
    """Create, register, and return the future that will hold ``item``'s result."""
    if not self.concurrency:
        fut = create_task(
            coro=self._wrapped_func(item, **self._wrapped_func_kwargs),
            name=f"{item}" if self._name is None else f"{self._name}[{item}]",
        )
    else:
        # NOTE: we use a queue instead of a Semaphore to reduce memory use for use cases involving many many tasks
        fut = self._queue.put_nowait(item)
    # bypass any overridden __setitem__ so registration stays a plain dict write
    dict.__setitem__(self, item, fut)
    return fut
|
|
673
|
+
|
|
674
|
+
def __cleanup(self, t: "Task[None]") -> None:
    """Done-callback for the init-loader task: drop the coroutine reference."""
    # clear the slot and let the bound Queue die
    del self.__init_loader_coro
|
|
677
|
+
|
|
678
|
+
|
|
679
|
+
class _NoRunningLoop(Exception): ...
|
|
680
|
+
|
|
681
|
+
|
|
682
|
+
@overload
def _yield(
    key: K, value: V, yields: Literal["keys"]
) -> K: ...  # yields="keys": only the key is returned
@overload
def _yield(
    key: K, value: V, yields: Literal["both"]
) -> Tuple[K, V]: ...  # yields="both": the (key, value) pair is returned
def _yield(key: K, value: V, yields: Literal["keys", "both"]) -> Union[K, Tuple[K, V]]:
    """Select what to emit for a completed task.

    Args:
        key: The key of the task.
        value: The result of the task.
        yields: ``'keys'`` to emit only the key, ``'both'`` for the pair.

    Returns:
        ``key`` or ``(key, value)`` according to ``yields``.

    Raises:
        ValueError: If ``yields`` is neither ``'keys'`` nor ``'both'``.
    """
    if yields == "keys":
        return key
    if yields == "both":
        return key, value
    raise ValueError(f"`yields` must be 'keys' or 'both'. You passed {yields}")
|
|
708
|
+
|
|
709
|
+
|
|
710
|
+
class _EmptySequenceError(ValueError): ...
|
|
711
|
+
|
|
712
|
+
|
|
713
|
+
async def _yield_keys(iterable: AnyIterableOrAwaitableIterable[K]) -> AsyncIterator[K]:
    """Asynchronously yield keys from ``iterable``.

    Supports async iterables, plain sync iterables, and awaitables resolving
    to either; large sync containers periodically yield control to the loop.

    Args:
        iterable: A synchronous or asynchronous iterable, or an awaitable of one.

    Yields:
        The keys contained in the iterable.

    Raises:
        _EmptySequenceError: If ``iterable`` is falsy (e.g. an empty container).
        TypeError: If ``iterable`` is none of the supported kinds.
    """
    if not iterable:
        raise _EmptySequenceError(iterable)

    if isinstance(iterable, AsyncIterable):
        async for item in iterable:
            yield item
        return

    if isinstance(iterable, Iterable):
        count = 0
        for item in iterable:
            yield item
            count += 1
            # let other coroutines run while draining a big container
            if count % 5_000 == 0:  # arbitrary number, should be adjusted later
                await yield_to_loop()
        return

    if isawaitable(iterable):
        async for item in _yield_keys(await iterable):
            yield item
        return

    raise TypeError(iterable)
|
|
744
|
+
|
|
745
|
+
|
|
746
|
+
# memoizes wrapper -> unwrapped callable without keeping wrappers alive
__unwrapped = WeakKeyDictionary()


def _unwrap(
    wrapped_func: Union[
        AnyFn[P, T], "ASyncMethodDescriptor[P, T]", _ASyncPropertyDescriptorBase[I, T]
    ],
) -> Callable[P, Awaitable[T]]:
    """Resolve ``wrapped_func`` to the plain awaitable-returning callable behind it.

    Results are cached in a module-level ``WeakKeyDictionary`` so repeated
    lookups for the same wrapper are cheap.
    """
    cached = __unwrapped.get(wrapped_func)
    if cached:
        return cached
    if isinstance(wrapped_func, (ASyncBoundMethod, ASyncMethodDescriptor)):
        result = wrapped_func
    elif isinstance(wrapped_func, _ASyncPropertyDescriptorBase):
        result = wrapped_func.get
    elif isinstance(wrapped_func, ASyncFunction):
        # this speeds things up a bit by bypassing some logic
        # TODO implement it like this elsewhere if profilers suggest
        if wrapped_func.is_async_def():
            result = wrapped_func._modified_fn
        else:
            result = wrapped_func._asyncified
    else:
        result = wrapped_func
    __unwrapped[wrapped_func] = result
    return result
|
|
770
|
+
|
|
771
|
+
|
|
772
|
+
# extractors used as sort keys when ordering (key, value) pairs
_get_key: Callable[[Tuple[K, V]], K] = lambda pair: pair[0]
_get_value: Callable[[Tuple[K, V]], V] = lambda pair: pair[1]
|
|
774
|
+
|
|
775
|
+
|
|
776
|
+
class _TaskMappingView(ASyncGenericBase, Iterable[T], Generic[T, K, V]):
    """
    Base class for TaskMapping views that provides common functionality.

    Wraps a concrete view object plus a weak proxy to the parent TaskMapping;
    subclasses supply ``_get_from_item`` and their own ``__aiter__``.
    """

    # extracts this view's item type from a (key, value) pair
    _get_from_item: Callable[[Tuple[K, V]], T]
    # when True, entries are removed from the mapping as they are consumed
    _pop: bool = False
    # NOTE(review): __init__ assigns `self._pop = True` although `_pop` is not
    # in __slots__ — presumably a base class provides a __dict__; confirm.

    __slots__ = "__view__", "__mapping__"

    def __init__(
        self, view: Iterable[T], task_mapping: TaskMapping[K, V], pop: bool = False
    ) -> None:
        self.__view__ = view
        # weak proxy: the view must not keep the mapping alive
        self.__mapping__: TaskMapping = proxy(task_mapping)
        "actually a weakref.ProxyType[TaskMapping] but then type hints weren't working"
        if pop:
            self._pop = True

    def __iter__(self) -> Iterator[T]:
        # synchronous iteration delegates to the wrapped view
        return iter(self.__view__)

    def __await__(self) -> Generator[Any, None, List[T]]:
        # awaiting the view materializes it into a list
        return self.__await().__await__()

    def __len__(self) -> int:
        return len(self.__view__)

    async def aiterbykeys(self, reverse: bool = False) -> ASyncIterator[T]:
        """Iterate this view's items sorted by task key."""
        async for tup in ASyncSorter(
            self.__mapping__.items(pop=self._pop), key=_get_key, reverse=reverse
        ):
            yield self._get_from_item(tup)

    async def aiterbyvalues(self, reverse: bool = False) -> ASyncIterator[T]:
        """Iterate this view's items sorted by task result."""
        async for tup in ASyncSorter(
            self.__mapping__.items(pop=self._pop), key=_get_value, reverse=reverse
        ):
            yield self._get_from_item(tup)

    async def __await(self) -> List[T]:
        # drive the subclass's __aiter__ to completion
        return [result async for result in self]
|
|
818
|
+
|
|
819
|
+
|
|
820
|
+
class TaskMappingKeys(_TaskMappingView[K, K, V], Generic[K, V]):
    """
    Asynchronous view to iterate over the keys of a TaskMapping.

    Keys already present are yielded first; then new keys are yielded as the
    mapping's init loader discovers them, with a ``yielded`` set guarding
    against duplicates. With ``pop=True``, keys are removed from the mapping
    as they are yielded and the mapping is cleared when iteration completes.
    """

    _get_from_item = lambda self, item: _get_key(item)

    async def __aiter__(self) -> AsyncIterator[K]:
        # strongref
        mapping = self.__mapping__
        mapping._if_pop_check_destroyed(self._pop)
        yielded = set()
        add_yielded = yielded.add
        for key in self.__load_existing():
            add_yielded(key)
            # there is no chance of duplicate keys here
            yield key
        if mapping._init_loader is None:
            # no background loader: everything visible has been yielded
            await mapping._if_pop_clear(self._pop)
            return
        async for key in self.__load_init_loader(yielded):
            add_yielded(key)
            yield key
        if self._pop:
            # don't need to check yielded since we've been popping them as we go
            for key in self.__load_existing():
                yield key
            await mapping._if_pop_clear(True)
        else:
            # final sweep for keys that appeared after the loader finished
            for key in self.__load_existing():
                if key not in yielded:
                    yield key

    def __load_existing(self) -> Iterator[K]:
        """Yield (and, when popping, remove) the keys currently in the mapping."""
        # strongref
        mapping = self.__mapping__
        if self._pop:
            pop = mapping.pop
            # snapshot with tuple() since popping mutates the mapping
            for key in tuple(mapping):
                pop(key)
                yield key
        else:
            yield from tuple(mapping)

    async def __load_init_loader(self, yielded: Set[K]) -> AsyncIterator[K]:
        """Yield keys as the init loader adds them, skipping those in ``yielded``."""
        # sourcery skip: hoist-loop-from-if
        # strongref
        mapping = self.__mapping__
        done = mapping._init_loader.done
        wait_for_next_key = mapping._wait_for_next_key

        if self._pop:
            pop = mapping.pop
            while not done():
                await wait_for_next_key()
                for key in tuple(filterfalse(yielded.__contains__, mapping)):
                    pop(key)
                    yield key
        else:
            while not done():
                await wait_for_next_key()
                for key in tuple(filterfalse(yielded.__contains__, mapping)):
                    yield key
        # check for any exceptions
        await mapping._init_loader
|
|
885
|
+
|
|
886
|
+
|
|
887
|
+
class TaskMappingItems(_TaskMappingView[Tuple[K, V], K, V], Generic[K, V]):
    """
    Asynchronous view to iterate over a TaskMapping's (key, value) pairs,
    awaiting each task as its key is produced.
    """

    def _get_from_item(self, item: Tuple[K, V]) -> Tuple[K, V]:
        # the items view emits the (key, value) pair unchanged
        return item

    async def __aiter__(self) -> AsyncIterator[Tuple[K, V]]:
        mapping = self.__mapping__  # hold a strong reference while iterating
        mapping._if_pop_check_destroyed(self._pop)
        if not self._pop:
            async for key in mapping.keys():
                yield key, await mapping[key]
        else:
            pop = mapping.pop
            async for key in mapping.keys():
                yield key, await pop(key)
|
|
905
|
+
|
|
906
|
+
|
|
907
|
+
class TaskMappingValues(_TaskMappingView[V, K, V], Generic[K, V]):
    """
    Asynchronous view to iterate over a TaskMapping's task results (values),
    awaiting each task as its key is produced.
    """

    def _get_from_item(self, item: Tuple[K, V]) -> V:
        # the values view extracts just the result from the (key, value) pair
        return item[1]

    async def __aiter__(self) -> AsyncIterator[V]:
        mapping = self.__mapping__  # hold a strong reference while iterating
        mapping._if_pop_check_destroyed(self._pop)
        if not self._pop:
            async for key in mapping.keys():
                yield await mapping[key]
        else:
            pop = mapping.pop
            async for key in mapping.keys():
                yield await pop(key)
|
|
925
|
+
|
|
926
|
+
|
|
927
|
+
__all__ = [
|
|
928
|
+
"TaskMapping",
|
|
929
|
+
"TaskMappingKeys",
|
|
930
|
+
"TaskMappingValues",
|
|
931
|
+
"TaskMappingItems",
|
|
932
|
+
]
|