ez-a-sync 0.22.13__py3-none-any.whl → 0.22.15__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ez-a-sync might be problematic.
- a_sync/ENVIRONMENT_VARIABLES.py +4 -3
- a_sync/__init__.py +30 -12
- a_sync/_smart.py +132 -28
- a_sync/_typing.py +56 -12
- a_sync/a_sync/__init__.py +35 -10
- a_sync/a_sync/_descriptor.py +74 -26
- a_sync/a_sync/_flags.py +14 -6
- a_sync/a_sync/_helpers.py +8 -7
- a_sync/a_sync/_kwargs.py +3 -2
- a_sync/a_sync/_meta.py +120 -28
- a_sync/a_sync/abstract.py +102 -28
- a_sync/a_sync/base.py +34 -16
- a_sync/a_sync/config.py +47 -13
- a_sync/a_sync/decorator.py +239 -117
- a_sync/a_sync/function.py +416 -146
- a_sync/a_sync/method.py +197 -59
- a_sync/a_sync/modifiers/__init__.py +47 -5
- a_sync/a_sync/modifiers/cache/__init__.py +46 -17
- a_sync/a_sync/modifiers/cache/memory.py +86 -20
- a_sync/a_sync/modifiers/limiter.py +52 -22
- a_sync/a_sync/modifiers/manager.py +98 -16
- a_sync/a_sync/modifiers/semaphores.py +48 -15
- a_sync/a_sync/property.py +383 -82
- a_sync/a_sync/singleton.py +1 -0
- a_sync/aliases.py +0 -1
- a_sync/asyncio/__init__.py +4 -1
- a_sync/asyncio/as_completed.py +177 -49
- a_sync/asyncio/create_task.py +31 -17
- a_sync/asyncio/gather.py +72 -52
- a_sync/asyncio/utils.py +3 -3
- a_sync/exceptions.py +78 -23
- a_sync/executor.py +120 -71
- a_sync/future.py +575 -158
- a_sync/iter.py +110 -50
- a_sync/primitives/__init__.py +14 -2
- a_sync/primitives/_debug.py +13 -13
- a_sync/primitives/_loggable.py +5 -4
- a_sync/primitives/locks/__init__.py +5 -2
- a_sync/primitives/locks/counter.py +38 -36
- a_sync/primitives/locks/event.py +21 -7
- a_sync/primitives/locks/prio_semaphore.py +182 -62
- a_sync/primitives/locks/semaphore.py +78 -77
- a_sync/primitives/queue.py +560 -58
- a_sync/sphinx/__init__.py +0 -1
- a_sync/sphinx/ext.py +160 -50
- a_sync/task.py +262 -97
- a_sync/utils/__init__.py +12 -6
- a_sync/utils/iterators.py +127 -43
- {ez_a_sync-0.22.13.dist-info → ez_a_sync-0.22.15.dist-info}/METADATA +1 -1
- ez_a_sync-0.22.15.dist-info/RECORD +74 -0
- {ez_a_sync-0.22.13.dist-info → ez_a_sync-0.22.15.dist-info}/WHEEL +1 -1
- tests/conftest.py +1 -2
- tests/executor.py +112 -9
- tests/fixtures.py +61 -32
- tests/test_abstract.py +7 -4
- tests/test_as_completed.py +54 -21
- tests/test_base.py +66 -17
- tests/test_cache.py +31 -15
- tests/test_decorator.py +54 -28
- tests/test_executor.py +8 -13
- tests/test_future.py +45 -8
- tests/test_gather.py +8 -2
- tests/test_helpers.py +2 -0
- tests/test_iter.py +55 -13
- tests/test_limiter.py +5 -3
- tests/test_meta.py +23 -9
- tests/test_modified.py +4 -1
- tests/test_semaphore.py +15 -8
- tests/test_singleton.py +15 -10
- tests/test_task.py +126 -28
- ez_a_sync-0.22.13.dist-info/RECORD +0 -74
- {ez_a_sync-0.22.13.dist-info → ez_a_sync-0.22.15.dist-info}/LICENSE.txt +0 -0
- {ez_a_sync-0.22.13.dist-info → ez_a_sync-0.22.15.dist-info}/top_level.txt +0 -0
a_sync/task.py
CHANGED
@@ -1,3 +1,12 @@
+"""
+This module provides asynchronous task management utilities, specifically focused on creating and handling mappings of tasks.
+
+The main components include:
+- TaskMapping: A class for managing and asynchronously generating tasks based on input iterables.
+- TaskMappingKeys: A view to asynchronously iterate over the keys of a TaskMapping.
+- TaskMappingValues: A view to asynchronously iterate over the values of a TaskMapping.
+- TaskMappingItems: A view to asynchronously iterate over the items (key-value pairs) of a TaskMapping.
+"""
 
 import asyncio
 import contextlib
@@ -12,7 +21,11 @@ from a_sync._typing import *
 from a_sync.a_sync import _kwargs
 from a_sync.a_sync.base import ASyncGenericBase
 from a_sync.a_sync.function import ASyncFunction
-from a_sync.a_sync.method import
+from a_sync.a_sync.method import (
+    ASyncBoundMethod,
+    ASyncMethodDescriptor,
+    ASyncMethodDescriptorSyncDefault,
+)
 from a_sync.a_sync.property import _ASyncPropertyDescriptorBase
 from a_sync.asyncio.as_completed import as_completed
 from a_sync.asyncio.gather import Excluder, gather
@@ -25,9 +38,9 @@ from a_sync.utils.iterators import as_yielded, exhaust_iterator
 logger = logging.getLogger(__name__)
 
 
-
 MappingFn = Callable[Concatenate[K, P], Awaitable[V]]
 
+
 class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]]):
     """
     A mapping of keys to asynchronous tasks with additional functionality.
@@ -48,7 +61,7 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
         async for key, result in tasks:
             print(f"Data for {key}: {result}")
     """
-
+
     concurrency: Optional[int] = None
     "The max number of tasks that will run at one time."
 
@@ -57,8 +70,10 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
 
     _init_loader: Optional["asyncio.Task[None]"] = None
     "An asyncio Task used to preload values from the iterables."
-
-    _init_loader_next: Optional[
+
+    _init_loader_next: Optional[
+        Callable[[], Awaitable[Tuple[Tuple[K, "asyncio.Task[V]"]]]]
+    ] = None
     "A coro function that blocks until the _init_loader starts a new task(s), and then returns a `Tuple[Tuple[K, asyncio.Task[V]]]` with all of the new tasks and the keys that started them."
 
     _name: Optional[str] = None
@@ -71,26 +86,27 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
     "Additional keyword arguments passed to `_wrapped_func`."
 
     __iterables__: Tuple[AnyIterableOrAwaitableIterable[K], ...] = ()
-    "The original iterables, if any, used to initialize this mapping."
-
+    "The original iterables, if any, used to initialize this mapping."
+
     __init_loader_coro: Optional[Awaitable[None]] = None
     """An optional asyncio Coroutine to be run by the `_init_loader`"""
 
     __slots__ = "_wrapped_func", "__wrapped__", "__dict__", "__weakref__"
+
     # NOTE: maybe since we use so many classvars here we are better off getting rid of slots
     def __init__(
-        self,
-        wrapped_func: MappingFn[K, P, V] = None,
-        *iterables: AnyIterableOrAwaitableIterable[K],
-        name: str =
-        concurrency: Optional[int] = None,
+        self,
+        wrapped_func: MappingFn[K, P, V] = None,
+        *iterables: AnyIterableOrAwaitableIterable[K],
+        name: str = "",
+        concurrency: Optional[int] = None,
         **wrapped_func_kwargs: P.kwargs,
     ) -> None:
         """
         Initialize a TaskMapping instance.
 
         Args:
-            wrapped_func: A
+            wrapped_func: A callable that takes a key and additional parameters and returns an Awaitable.
            *iterables: Any number of iterables whose elements will be used as keys for task generation.
            name: An optional name for the tasks created by this mapping.
            concurrency: Maximum number of tasks to run concurrently.
@@ -101,7 +117,7 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
         self.concurrency = concurrency
 
         self.__wrapped__ = wrapped_func
-        "The original callable used to initialize this mapping without any modifications."
+        "The original callable used to initialize this mapping without any modifications."
 
         if iterables:
             self.__iterables__ = iterables
@@ -121,48 +137,67 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
 
         if iterables:
             self._next = Event(name=f"{self} `_next`")
+
             @functools.wraps(wrapped_func)
-            async def _wrapped_set_next(
+            async def _wrapped_set_next(
+                *args: P.args, __a_sync_recursion: int = 0, **kwargs: P.kwargs
+            ) -> V:
                 try:
                     return await wrapped_func(*args, **kwargs)
                 except exceptions.SyncModeInAsyncContextError as e:
                     raise Exception(e, self.__wrapped__)
                 except TypeError as e:
-                    if __a_sync_recursion > 2 or not (
+                    if __a_sync_recursion > 2 or not (
+                        str(e).startswith(wrapped_func.__name__)
+                        and "got multiple values for argument" in str(e)
+                    ):
                         raise
                     # NOTE: args ordering is clashing with provided kwargs. We can handle this in a hacky way.
                     # TODO: perform this check earlier and pre-prepare the args/kwargs ordering
                     new_args = list(args)
                     new_kwargs = dict(kwargs)
                     try:
-                        for i, arg in enumerate(
+                        for i, arg in enumerate(
+                            inspect.getfullargspec(self.__wrapped__).args
+                        ):
                             if arg in kwargs:
                                 new_args.insert(i, new_kwargs.pop(arg))
                             else:
                                 break
-                        return await _wrapped_set_next(
+                        return await _wrapped_set_next(
+                            *new_args,
+                            **new_kwargs,
+                            __a_sync_recursion=__a_sync_recursion + 1,
+                        )
                     except TypeError as e2:
-                        raise
+                        raise (
+                            e.with_traceback(e.__traceback__)
+                            if str(e2) == "unsupported callable"
+                            else e2.with_traceback(e2.__traceback__)
+                        )
                 finally:
                     self._next.set()
                     self._next.clear()
+
             self._wrapped_func = _wrapped_set_next
             init_loader_queue: Queue[Tuple[K, "asyncio.Future[V]"]] = Queue()
-            self.__init_loader_coro = exhaust_iterator(
+            self.__init_loader_coro = exhaust_iterator(
+                self._tasks_for_iterables(*iterables), queue=init_loader_queue
+            )
             with contextlib.suppress(_NoRunningLoop):
                 # its okay if we get this exception, we can start the task as soon as the loop starts
                 self._init_loader
             self._init_loader_next = init_loader_queue.get_all
-
+
     def __repr__(self) -> str:
         return f"<{type(self).__name__} for {self._wrapped_func} kwargs={self._wrapped_func_kwargs} tasks={len(self)} at {hex(id(self))}>"
-
+
     def __hash__(self) -> int:
         return id(self)
-
+
     def __setitem__(self, item: Any, value: Any) -> None:
         raise NotImplementedError("You cannot manually set items in a TaskMapping")
-
+
     def __getitem__(self, item: K) -> "asyncio.Task[V]":
         try:
             return super().__getitem__(item)
@@ -172,17 +207,17 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
                 fut = self._queue.put_nowait(item)
             else:
                 coro = self._wrapped_func(item, **self._wrapped_func_kwargs)
-                name = f"{self._name}[{item}]" if self._name else f"{item}",
+                name = (f"{self._name}[{item}]" if self._name else f"{item}",)
                 fut = create_task(coro=coro, name=name)
             super().__setitem__(item, fut)
             return fut
-
+
     def __await__(self) -> Generator[Any, None, Dict[K, V]]:
         """Wait for all tasks to complete and return a dictionary of the results."""
         return self.gather(sync=False).__await__()
 
     async def __aiter__(self, pop: bool = False) -> AsyncIterator[Tuple[K, V]]:
-        """
+        """Asynchronously iterate through all key-task pairs, yielding the key-result pair as each task completes."""
         self._if_pop_check_destroyed(pop)
 
         # if you inited the TaskMapping with some iterators, we will load those
@@ -195,7 +230,9 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
             while not self._init_loader.done():
                 await self._wait_for_next_key()
             while unyielded := [key for key in self if key not in yielded]:
-                if ready := {
+                if ready := {
+                    key: task for key in unyielded if (task := self[key]).done()
+                }:
                     if pop:
                         for key, task in ready.items():
                             yield key, await self.pop(key)
@@ -231,34 +268,41 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
 
     def values(self, pop: bool = False) -> "TaskMappingValues[K, V]":
         return TaskMappingValues(super().values(), self, pop=pop)
-
+
     def items(self, pop: bool = False) -> "TaskMappingValues[K, V]":
         return TaskMappingItems(super().items(), self, pop=pop)
-
+
     async def close(self) -> None:
         await self._if_pop_clear(True)
 
     @ASyncGeneratorFunction
-    async def map(
+    async def map(
+        self,
+        *iterables: AnyIterableOrAwaitableIterable[K],
+        pop: bool = True,
+        yields: Literal["keys", "both"] = "both",
+    ) -> AsyncIterator[Tuple[K, V]]:
         """
-
+        Asynchronously map iterables to tasks and yield their results.
 
-
+        Args:
             *iterables: Iterables to map over.
             pop: Whether to remove tasks from the internal storage once they are completed.
             yields: Whether to yield 'keys', 'values', or 'both' (key-value pairs).
-
-
+
+        Yields:
             Depending on `yields`, either keys, values,
             or tuples of key-value pairs representing the results of completed tasks.
         """
         self._if_pop_check_destroyed(pop)
-
+
         # make sure the init loader is started if needed
         init_loader = self._init_loader
         if iterables and init_loader:
-            raise ValueError(
-
+            raise ValueError(
+                "You cannot pass `iterables` to map if the TaskMapping was initialized with an (a)iterable."
+            )
+
         try:
             if iterables:
                 self._raise_if_not_empty()
@@ -269,15 +313,19 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
         except _EmptySequenceError:
             if len(iterables) > 1:
                 # TODO gotta handle this situation
-                raise exceptions.EmptySequenceError(
+                raise exceptions.EmptySequenceError(
+                    "bob needs to code something so you can do this, go tell him"
+                ) from None
             # just pass thru
-
+
         elif init_loader:
             # check for exceptions if you passed an iterable(s) into the class init
             await init_loader
-
+
         else:
-            self._raise_if_empty(
+            self._raise_if_empty(
+                "You must either initialize your TaskMapping with an iterable(s) or provide them during your call to map"
+            )
 
         if self:
             if pop:
@@ -289,7 +337,7 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
                     yield _yield(key, value, yields)
         finally:
             await self._if_pop_clear(pop)
-
+
     @ASyncMethodDescriptorSyncDefault
     async def all(self, pop: bool = True) -> bool:
         try:
@@ -301,7 +349,7 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
             return True
         finally:
             await self._if_pop_clear(pop)
-
+
     @ASyncMethodDescriptorSyncDefault
     async def any(self, pop: bool = True) -> bool:
         try:
@@ -313,7 +361,7 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
             return False
         finally:
             await self._if_pop_clear(pop)
-
+
     @ASyncMethodDescriptorSyncDefault
     async def max(self, pop: bool = True) -> V:
         max = None
@@ -322,26 +370,36 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
                 if max is None or result > max:
                     max = result
         except _EmptySequenceError:
-            raise exceptions.EmptySequenceError(
+            raise exceptions.EmptySequenceError(
+                "max() arg is an empty sequence"
+            ) from None
         if max is None:
-            raise exceptions.EmptySequenceError(
+            raise exceptions.EmptySequenceError(
+                "max() arg is an empty sequence"
+            ) from None
         return max
-
+
     @ASyncMethodDescriptorSyncDefault
     async def min(self, pop: bool = True) -> V:
+        """Return the minimum result from the tasks in the mapping."""
         min = None
         try:
             async for key, result in self.__aiter__(pop=pop):
                 if min is None or result < min:
                     min = result
         except _EmptySequenceError:
-            raise exceptions.EmptySequenceError(
+            raise exceptions.EmptySequenceError(
+                "min() arg is an empty sequence"
+            ) from None
        if min is None:
-            raise exceptions.EmptySequenceError(
+            raise exceptions.EmptySequenceError(
+                "min() arg is an empty sequence"
+            ) from None
        return min
-
+
    @ASyncMethodDescriptorSyncDefault
    async def sum(self, pop: bool = False) -> V:
+        """Return the sum of the results from the tasks in the mapping."""
        retval = 0
        try:
            async for key, result in self.__aiter__(pop=pop):
@@ -369,11 +427,11 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
         for k, task in dict(self).items():
             if task.done():
                 yield k, await task
-
+
     @ASyncMethodDescriptorSyncDefault
     async def gather(
-        self,
-        return_exceptions: bool = False,
+        self,
+        return_exceptions: bool = False,
         exclude_if: Excluder[V] = None,
         tqdm: bool = False,
         **tqdm_kwargs: Any,
@@ -382,19 +440,53 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
         if self._init_loader:
             await self._init_loader
         self._raise_if_empty()
-        return await gather(
-
+        return await gather(
+            self,
+            return_exceptions=return_exceptions,
+            exclude_if=exclude_if,
+            tqdm=tqdm,
+            **tqdm_kwargs,
+        )
+
     @overload
-    def pop(
+    def pop(
+        self, item: K, *, cancel: bool = False
+    ) -> "Union[asyncio.Task[V], asyncio.Future[V]]":
+        """Pop a task from the TaskMapping.
+
+        Args:
+            item: The key to pop.
+            cancel: Whether to cancel the task when popping it.
+        """
+
     @overload
-    def pop(
-
+    def pop(
+        self, item: K, default: K, *, cancel: bool = False
+    ) -> "Union[asyncio.Task[V], asyncio.Future[V]]":
+        """Pop a task from the TaskMapping.
+
+        Args:
+            item: The key to pop.
+            default: The default value to return if no matching key is found.
+            cancel: Whether to cancel the task when popping it.
+        """
+
+    def pop(
+        self, *args: K, cancel: bool = False
+    ) -> "Union[asyncio.Task[V], asyncio.Future[V]]":
+        """Pop a task from the TaskMapping.
+
+        Args:
+            *args: One key to pop.
+            cancel: Whether to cancel the task when popping it.
+        """
         fut_or_task = super().pop(*args)
         if cancel:
             fut_or_task.cancel()
         return fut_or_task
-
+
     def clear(self, cancel: bool = False) -> None:
+        """# TODO write docs for this"""
         if cancel and self._init_loader and not self._init_loader.done():
             logger.debug("cancelling %s", self._init_loader)
             # temporary, remove later
@@ -412,106 +504,125 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
     def _init_loader(self) -> Optional["asyncio.Task[None]"]:
         if self.__init_loader_coro:
             logger.debug("starting %s init loader", self)
-            name=f"{type(self).__name__} init loader loading {self.__iterables__} for {self}"
+            name = f"{type(self).__name__} init loader loading {self.__iterables__} for {self}"
             try:
                 task = create_task(coro=self.__init_loader_coro, name=name)
             except RuntimeError as e:
                 raise _NoRunningLoop if str(e) == "no running event loop" else e
             task.add_done_callback(self.__cleanup)
             return task
-
+
     @functools.cached_property
     def _queue(self) -> ProcessingQueue:
         fn = functools.partial(self._wrapped_func, **self._wrapped_func_kwargs)
         return ProcessingQueue(fn, self.concurrency, name=self._name)
-
-    def _raise_if_empty(self, msg: str =
+
+    def _raise_if_empty(self, msg: str = "") -> None:
         if not self:
             raise exceptions.MappingIsEmptyError(self, msg)
-
+
     def _raise_if_not_empty(self) -> None:
         if self:
             raise exceptions.MappingNotEmptyError(self)
 
     @ASyncGeneratorFunction
-    async def _tasks_for_iterables(
+    async def _tasks_for_iterables(
+        self, *iterables: AnyIterableOrAwaitableIterable[K]
+    ) -> AsyncIterator[Tuple[K, "asyncio.Task[V]"]]:
         """Ensure tasks are running for each key in the provided iterables."""
         # if we have any regular containers we can yield their contents right away
-        containers = [
+        containers = [
+            iterable
+            for iterable in iterables
+            if not isinstance(iterable, AsyncIterable)
+            and isinstance(iterable, Iterable)
+        ]
         for iterable in containers:
             async for key in _yield_keys(iterable):
                 yield key, self[key]
-
-        if remaining := [
+
+        if remaining := [
+            iterable for iterable in iterables if iterable not in containers
+        ]:
             try:
-                async for key in as_yielded(*[_yield_keys(iterable) for iterable in remaining]):
+                async for key in as_yielded(*[_yield_keys(iterable) for iterable in remaining]):  # type: ignore [attr-defined]
                     yield key, self[key]  # ensure task is running
             except _EmptySequenceError:
                 if len(iterables) == 1:
                     raise
-                raise RuntimeError(
-
+                raise RuntimeError(
+                    "DEV: figure out how to handle this situation"
+                ) from None
+
     def _if_pop_check_destroyed(self, pop: bool) -> None:
         if pop:
             if self._destroyed:
                 raise RuntimeError(f"{self} has already been consumed")
             self._destroyed = True
-
+
     async def _if_pop_clear(self, pop: bool) -> None:
         if pop:
             self._destroyed = True
             # _queue is a cached_property, we don't want to create it if it doesn't exist
-            if self.concurrency and
+            if self.concurrency and "_queue" in self.__dict__:
                 self._queue.close()
                 del self._queue
             self.clear(cancel=True)
             # we need to let the loop run once so the tasks can fully cancel
             await asyncio.sleep(0)
-
+
     async def _wait_for_next_key(self) -> None:
         # NOTE if `_init_loader` has an exception it will return first, otherwise `_init_loader_next` will return always
         done, pending = await asyncio.wait(
-            [
-
+            [
+                create_task(self._init_loader_next(), log_destroy_pending=False),
+                self._init_loader,
+            ],
+            return_when=asyncio.FIRST_COMPLETED,
         )
         for task in done:
             # check for exceptions
             await task
-
+
     def __cleanup(self, t: "asyncio.Task[None]") -> None:
         # clear the slot and let the bound Queue die
         del self.__init_loader_coro
 
 
-class _NoRunningLoop(Exception):
-
+class _NoRunningLoop(Exception): ...
+
 
 @overload
-def _yield(
+def _yield(
+    key: K, value: V, yields: Literal["keys"]
+) -> K: ...  # TODO write specific docs for this overload
 @overload
-def _yield(
-
+def _yield(
+    key: K, value: V, yields: Literal["both"]
+) -> Tuple[K, V]: ...  # TODO write specific docs for this overload
+def _yield(key: K, value: V, yields: Literal["keys", "both"]) -> Union[K, Tuple[K, V]]:
     """
     Yield either the key, value, or both based on the 'yields' parameter.
-
+
     Args:
         key: The key of the task.
         value: The result of the task.
         yields: Determines what to yield; 'keys' for keys, 'both' for key-value pairs.
-
+
     Returns:
         The key, the value, or a tuple of both based on the 'yields' parameter.
     """
-    if yields ==
+    if yields == "both":
        return key, value
-    elif yields ==
+    elif yields == "keys":
        return key
    else:
        raise ValueError(f"`yields` must be 'keys' or 'both'. You passed {yields}")
 
-
-
-
+
+class _EmptySequenceError(ValueError): ...
+
+
 async def _yield_keys(iterable: AnyIterableOrAwaitableIterable[K]) -> AsyncIterator[K]:
     """
     Asynchronously yield keys from the provided iterable.
@@ -536,9 +647,15 @@ async def _yield_keys(iterable: AnyIterableOrAwaitableIterable[K]) -> AsyncItera
     else:
         raise TypeError(iterable)
 
+
 __unwrapped = weakref.WeakKeyDictionary()
 
-
+
+
+def _unwrap(
+    wrapped_func: Union[
+        AnyFn[P, T], "ASyncMethodDescriptor[P, T]", _ASyncPropertyDescriptorBase[I, T]
+    ]
+) -> Callable[P, Awaitable[T]]:
     if unwrapped := __unwrapped.get(wrapped_func):
         return unwrapped
     if isinstance(wrapped_func, (ASyncBoundMethod, ASyncMethodDescriptor)):
@@ -548,7 +665,11 @@ def _unwrap(wrapped_func: Union[AnyFn[P, T], "ASyncMethodDescriptor[P, T]", _ASy
     elif isinstance(wrapped_func, ASyncFunction):
         # this speeds things up a bit by bypassing some logic
         # TODO implement it like this elsewhere if profilers suggest
-        unwrapped =
+        unwrapped = (
+            wrapped_func._modified_fn
+            if wrapped_func._async_def
+            else wrapped_func._asyncified
+        )
     else:
         unwrapped = wrapped_func
     __unwrapped[wrapped_func] = unwrapped
@@ -558,34 +679,58 @@ def _unwrap(wrapped_func: Union[AnyFn[P, T], "ASyncMethodDescriptor[P, T]", _ASy
 _get_key: Callable[[Tuple[K, V]], K] = lambda k_and_v: k_and_v[0]
 _get_value: Callable[[Tuple[K, V]], V] = lambda k_and_v: k_and_v[1]
 
+
 class _TaskMappingView(ASyncGenericBase, Iterable[T], Generic[T, K, V]):
+    """
+    Base class for TaskMapping views that provides common functionality.
+    """
+
     _get_from_item: Callable[[Tuple[K, V]], T]
     _pop: bool = False
-
+
+    def __init__(
+        self, view: Iterable[T], task_mapping: TaskMapping[K, V], pop: bool = False
+    ) -> None:
         self.__view__ = view
         self.__mapping__: TaskMapping = weakref.proxy(task_mapping)
         "actually a weakref.ProxyType[TaskMapping] but then type hints weren't working"
         if pop:
             self._pop = True
+
     def __iter__(self) -> Iterator[T]:
         return iter(self.__view__)
+
     def __await__(self) -> Generator[Any, None, List[T]]:
         return self._await().__await__()
+
     def __len__(self) -> int:
         return len(self.__view__)
+
     async def _await(self) -> List[T]:
         return [result async for result in self]
+
     __slots__ = "__view__", "__mapping__"
+
     async def aiterbykeys(self, reverse: bool = False) -> ASyncIterator[T]:
-        async for tup in ASyncSorter(
+        async for tup in ASyncSorter(
+            self.__mapping__.items(pop=self._pop), key=_get_key, reverse=reverse
+        ):
             yield self._get_from_item(tup)
+
     async def aiterbyvalues(self, reverse: bool = False) -> ASyncIterator[T]:
-        async for tup in ASyncSorter(
+        async for tup in ASyncSorter(
+            self.__mapping__.items(pop=self._pop), key=_get_value, reverse=reverse
+        ):
             yield self._get_from_item(tup)
 
 
 class TaskMappingKeys(_TaskMappingView[K, K, V], Generic[K, V]):
+    """
+    Asynchronous view to iterate over the keys of a TaskMapping.
+    """
+
     _get_from_item = lambda self, item: _get_key(item)
+
     async def __aiter__(self) -> AsyncIterator[K]:
         # strongref
         mapping = self.__mapping__
@@ -610,6 +755,7 @@ class TaskMappingKeys(_TaskMappingView[K, K, V], Generic[K, V]):
         for key in self.__load_existing():
             if key not in yielded:
                 yield key
+
     def __load_existing(self) -> Iterator[K]:
         # strongref
         mapping = self.__mapping__
@@ -620,6 +766,7 @@ class TaskMappingKeys(_TaskMappingView[K, K, V], Generic[K, V]):
         else:
             for key in tuple(mapping):
                 yield key
+
     async def __load_init_loader(self, yielded: Set[K]) -> AsyncIterator[K]:
         # strongref
         mapping = self.__mapping__
@@ -637,8 +784,14 @@ class TaskMappingKeys(_TaskMappingView[K, K, V], Generic[K, V]):
         # check for any exceptions
         await mapping._init_loader
 
+
 class TaskMappingItems(_TaskMappingView[Tuple[K, V], K, V], Generic[K, V]):
+    """
+    Asynchronous view to iterate over the items (key-value pairs) of a TaskMapping.
+    """
+
     _get_from_item = lambda self, item: item
+
     async def __aiter__(self) -> AsyncIterator[Tuple[K, V]]:
         # strongref
         mapping = self.__mapping__
@@ -649,9 +802,15 @@ class TaskMappingItems(_TaskMappingView[Tuple[K, V], K, V], Generic[K, V]):
         else:
             async for key in mapping.keys():
                 yield key, await mapping[key]
-
+
+
 class TaskMappingValues(_TaskMappingView[V, K, V], Generic[K, V]):
+    """
+    Asynchronous view to iterate over the values of a TaskMapping.
+    """
+
     _get_from_item = lambda self, item: _get_value(item)
+
     async def __aiter__(self) -> AsyncIterator[V]:
         # strongref
         mapping = self.__mapping__
@@ -664,4 +823,10 @@ class TaskMappingValues(_TaskMappingView[V, K, V], Generic[K, V]):
             yield await mapping[key]
 
 
-__all__ = [
+__all__ = [
+    "create_task",
+    "TaskMapping",
+    "TaskMappingKeys",
+    "TaskMappingValues",
+    "TaskMappingItems",
+]
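
The module and class docstrings added in this diff describe TaskMapping as an awaitable, async-iterable mapping that creates one asyncio.Task per key. Below is a minimal usage sketch based only on those docstrings and the __init__ signature shown above; the fetch_data coroutine, the key range, and the names used here are hypothetical placeholders, not part of the package.

import asyncio

from a_sync.task import TaskMapping  # exported via the __all__ shown in the diff above


async def fetch_data(key: int) -> int:
    # hypothetical wrapped_func: takes a key and returns an awaitable result
    await asyncio.sleep(0)
    return key * 2


async def main() -> None:
    # one task is created per key from the iterable; at most 2 run at a time
    tasks = TaskMapping(fetch_data, range(5), name="demo", concurrency=2)
    # yields (key, result) pairs as tasks complete, per the class docstring example
    async for key, result in tasks:
        print(f"Data for {key}: {result}")


asyncio.run(main())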