ez-a-sync 0.32.9__cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ez-a-sync might be problematic. Click here for more details.

Files changed (177) hide show
  1. a_sync/ENVIRONMENT_VARIABLES.py +42 -0
  2. a_sync/__init__.pxd +2 -0
  3. a_sync/__init__.py +144 -0
  4. a_sync/_smart.c +22623 -0
  5. a_sync/_smart.cpython-39-x86_64-linux-gnu.so +0 -0
  6. a_sync/_smart.pxd +2 -0
  7. a_sync/_smart.pyi +202 -0
  8. a_sync/_smart.pyx +652 -0
  9. a_sync/_typing.py +258 -0
  10. a_sync/a_sync/__init__.py +60 -0
  11. a_sync/a_sync/_descriptor.c +19745 -0
  12. a_sync/a_sync/_descriptor.cpython-39-x86_64-linux-gnu.so +0 -0
  13. a_sync/a_sync/_descriptor.pyi +34 -0
  14. a_sync/a_sync/_descriptor.pyx +422 -0
  15. a_sync/a_sync/_flags.c +5687 -0
  16. a_sync/a_sync/_flags.cpython-39-x86_64-linux-gnu.so +0 -0
  17. a_sync/a_sync/_flags.pxd +3 -0
  18. a_sync/a_sync/_flags.pyx +92 -0
  19. a_sync/a_sync/_helpers.c +13718 -0
  20. a_sync/a_sync/_helpers.cpython-39-x86_64-linux-gnu.so +0 -0
  21. a_sync/a_sync/_helpers.pxd +3 -0
  22. a_sync/a_sync/_helpers.pyi +10 -0
  23. a_sync/a_sync/_helpers.pyx +167 -0
  24. a_sync/a_sync/_kwargs.c +10672 -0
  25. a_sync/a_sync/_kwargs.cpython-39-x86_64-linux-gnu.so +0 -0
  26. a_sync/a_sync/_kwargs.pxd +2 -0
  27. a_sync/a_sync/_kwargs.pyx +64 -0
  28. a_sync/a_sync/_meta.py +210 -0
  29. a_sync/a_sync/abstract.c +11350 -0
  30. a_sync/a_sync/abstract.cpython-39-x86_64-linux-gnu.so +0 -0
  31. a_sync/a_sync/abstract.pyi +141 -0
  32. a_sync/a_sync/abstract.pyx +221 -0
  33. a_sync/a_sync/base.c +14066 -0
  34. a_sync/a_sync/base.cpython-39-x86_64-linux-gnu.so +0 -0
  35. a_sync/a_sync/base.pyi +60 -0
  36. a_sync/a_sync/base.pyx +271 -0
  37. a_sync/a_sync/config.py +168 -0
  38. a_sync/a_sync/decorator.py +562 -0
  39. a_sync/a_sync/flags.c +4471 -0
  40. a_sync/a_sync/flags.cpython-39-x86_64-linux-gnu.so +0 -0
  41. a_sync/a_sync/flags.pxd +72 -0
  42. a_sync/a_sync/flags.pyi +74 -0
  43. a_sync/a_sync/flags.pyx +72 -0
  44. a_sync/a_sync/function.c +37747 -0
  45. a_sync/a_sync/function.cpython-39-x86_64-linux-gnu.so +0 -0
  46. a_sync/a_sync/function.pxd +24 -0
  47. a_sync/a_sync/function.pyi +556 -0
  48. a_sync/a_sync/function.pyx +1363 -0
  49. a_sync/a_sync/method.c +28387 -0
  50. a_sync/a_sync/method.cpython-39-x86_64-linux-gnu.so +0 -0
  51. a_sync/a_sync/method.pxd +2 -0
  52. a_sync/a_sync/method.pyi +524 -0
  53. a_sync/a_sync/method.pyx +999 -0
  54. a_sync/a_sync/modifiers/__init__.pxd +1 -0
  55. a_sync/a_sync/modifiers/__init__.py +101 -0
  56. a_sync/a_sync/modifiers/cache/__init__.py +160 -0
  57. a_sync/a_sync/modifiers/cache/memory.py +165 -0
  58. a_sync/a_sync/modifiers/limiter.py +132 -0
  59. a_sync/a_sync/modifiers/manager.c +15262 -0
  60. a_sync/a_sync/modifiers/manager.cpython-39-x86_64-linux-gnu.so +0 -0
  61. a_sync/a_sync/modifiers/manager.pxd +5 -0
  62. a_sync/a_sync/modifiers/manager.pyi +219 -0
  63. a_sync/a_sync/modifiers/manager.pyx +296 -0
  64. a_sync/a_sync/modifiers/semaphores.py +173 -0
  65. a_sync/a_sync/property.c +27952 -0
  66. a_sync/a_sync/property.cpython-39-x86_64-linux-gnu.so +0 -0
  67. a_sync/a_sync/property.pyi +376 -0
  68. a_sync/a_sync/property.pyx +813 -0
  69. a_sync/a_sync/singleton.py +63 -0
  70. a_sync/aliases.py +3 -0
  71. a_sync/async_property/__init__.pxd +1 -0
  72. a_sync/async_property/__init__.py +1 -0
  73. a_sync/async_property/cached.c +20542 -0
  74. a_sync/async_property/cached.cpython-39-x86_64-linux-gnu.so +0 -0
  75. a_sync/async_property/cached.pxd +10 -0
  76. a_sync/async_property/cached.pyi +45 -0
  77. a_sync/async_property/cached.pyx +178 -0
  78. a_sync/async_property/proxy.c +36410 -0
  79. a_sync/async_property/proxy.cpython-39-x86_64-linux-gnu.so +0 -0
  80. a_sync/async_property/proxy.pxd +2 -0
  81. a_sync/async_property/proxy.pyi +124 -0
  82. a_sync/async_property/proxy.pyx +474 -0
  83. a_sync/asyncio/__init__.pxd +6 -0
  84. a_sync/asyncio/__init__.py +164 -0
  85. a_sync/asyncio/as_completed.c +18198 -0
  86. a_sync/asyncio/as_completed.cpython-39-x86_64-linux-gnu.so +0 -0
  87. a_sync/asyncio/as_completed.pxd +8 -0
  88. a_sync/asyncio/as_completed.pyi +109 -0
  89. a_sync/asyncio/as_completed.pyx +269 -0
  90. a_sync/asyncio/create_task.c +15032 -0
  91. a_sync/asyncio/create_task.cpython-39-x86_64-linux-gnu.so +0 -0
  92. a_sync/asyncio/create_task.pxd +2 -0
  93. a_sync/asyncio/create_task.pyi +51 -0
  94. a_sync/asyncio/create_task.pyx +268 -0
  95. a_sync/asyncio/gather.c +15735 -0
  96. a_sync/asyncio/gather.cpython-39-x86_64-linux-gnu.so +0 -0
  97. a_sync/asyncio/gather.pyi +107 -0
  98. a_sync/asyncio/gather.pyx +218 -0
  99. a_sync/asyncio/igather.c +12334 -0
  100. a_sync/asyncio/igather.cpython-39-x86_64-linux-gnu.so +0 -0
  101. a_sync/asyncio/igather.pxd +1 -0
  102. a_sync/asyncio/igather.pyi +7 -0
  103. a_sync/asyncio/igather.pyx +175 -0
  104. a_sync/asyncio/sleep.c +8916 -0
  105. a_sync/asyncio/sleep.cpython-39-x86_64-linux-gnu.so +0 -0
  106. a_sync/asyncio/sleep.pyi +14 -0
  107. a_sync/asyncio/sleep.pyx +49 -0
  108. a_sync/debugging.c +15156 -0
  109. a_sync/debugging.cpython-39-x86_64-linux-gnu.so +0 -0
  110. a_sync/debugging.pyi +73 -0
  111. a_sync/debugging.pyx +107 -0
  112. a_sync/exceptions.c +12952 -0
  113. a_sync/exceptions.cpython-39-x86_64-linux-gnu.so +0 -0
  114. a_sync/exceptions.pyi +376 -0
  115. a_sync/exceptions.pyx +443 -0
  116. a_sync/executor.py +575 -0
  117. a_sync/functools.c +11489 -0
  118. a_sync/functools.cpython-39-x86_64-linux-gnu.so +0 -0
  119. a_sync/functools.pxd +7 -0
  120. a_sync/functools.pyi +33 -0
  121. a_sync/functools.pyx +139 -0
  122. a_sync/future.py +1497 -0
  123. a_sync/iter.c +37567 -0
  124. a_sync/iter.cpython-39-x86_64-linux-gnu.so +0 -0
  125. a_sync/iter.pxd +9 -0
  126. a_sync/iter.pyi +366 -0
  127. a_sync/iter.pyx +981 -0
  128. a_sync/primitives/__init__.pxd +1 -0
  129. a_sync/primitives/__init__.py +53 -0
  130. a_sync/primitives/_debug.c +14737 -0
  131. a_sync/primitives/_debug.cpython-39-x86_64-linux-gnu.so +0 -0
  132. a_sync/primitives/_debug.pxd +12 -0
  133. a_sync/primitives/_debug.pyi +52 -0
  134. a_sync/primitives/_debug.pyx +223 -0
  135. a_sync/primitives/_loggable.c +10569 -0
  136. a_sync/primitives/_loggable.cpython-39-x86_64-linux-gnu.so +0 -0
  137. a_sync/primitives/_loggable.pxd +4 -0
  138. a_sync/primitives/_loggable.pyi +66 -0
  139. a_sync/primitives/_loggable.pyx +102 -0
  140. a_sync/primitives/locks/__init__.pxd +8 -0
  141. a_sync/primitives/locks/__init__.py +17 -0
  142. a_sync/primitives/locks/counter.c +16972 -0
  143. a_sync/primitives/locks/counter.cpython-39-x86_64-linux-gnu.so +0 -0
  144. a_sync/primitives/locks/counter.pxd +12 -0
  145. a_sync/primitives/locks/counter.pyi +151 -0
  146. a_sync/primitives/locks/counter.pyx +260 -0
  147. a_sync/primitives/locks/event.c +16127 -0
  148. a_sync/primitives/locks/event.cpython-39-x86_64-linux-gnu.so +0 -0
  149. a_sync/primitives/locks/event.pxd +22 -0
  150. a_sync/primitives/locks/event.pyi +43 -0
  151. a_sync/primitives/locks/event.pyx +183 -0
  152. a_sync/primitives/locks/prio_semaphore.c +24084 -0
  153. a_sync/primitives/locks/prio_semaphore.cpython-39-x86_64-linux-gnu.so +0 -0
  154. a_sync/primitives/locks/prio_semaphore.pxd +24 -0
  155. a_sync/primitives/locks/prio_semaphore.pyi +217 -0
  156. a_sync/primitives/locks/prio_semaphore.pyx +554 -0
  157. a_sync/primitives/locks/semaphore.c +25816 -0
  158. a_sync/primitives/locks/semaphore.cpython-39-x86_64-linux-gnu.so +0 -0
  159. a_sync/primitives/locks/semaphore.pxd +21 -0
  160. a_sync/primitives/locks/semaphore.pyi +196 -0
  161. a_sync/primitives/locks/semaphore.pyx +452 -0
  162. a_sync/primitives/queue.py +1018 -0
  163. a_sync/py.typed +0 -0
  164. a_sync/sphinx/__init__.py +3 -0
  165. a_sync/sphinx/ext.py +289 -0
  166. a_sync/task.py +916 -0
  167. a_sync/utils/__init__.py +105 -0
  168. a_sync/utils/iterators.py +297 -0
  169. a_sync/utils/repr.c +14354 -0
  170. a_sync/utils/repr.cpython-39-x86_64-linux-gnu.so +0 -0
  171. a_sync/utils/repr.pyi +2 -0
  172. a_sync/utils/repr.pyx +73 -0
  173. ez_a_sync-0.32.9.dist-info/METADATA +367 -0
  174. ez_a_sync-0.32.9.dist-info/RECORD +177 -0
  175. ez_a_sync-0.32.9.dist-info/WHEEL +6 -0
  176. ez_a_sync-0.32.9.dist-info/licenses/LICENSE.txt +17 -0
  177. ez_a_sync-0.32.9.dist-info/top_level.txt +1 -0
a_sync/task.py ADDED
@@ -0,0 +1,916 @@
1
+ """
2
+ This module provides asynchronous task management utilities, specifically focused on creating and handling mappings of tasks.
3
+
4
+ The main components include:
5
+ - TaskMapping: A class for managing and asynchronously generating tasks based on input iterables.
6
+ - TaskMappingKeys: A view to asynchronously iterate over the keys of a TaskMapping.
7
+ - TaskMappingValues: A view to asynchronously iterate over the values of a TaskMapping.
8
+ - TaskMappingItems: A view to asynchronously iterate over the items (key-value pairs) of a TaskMapping.
9
+ """
10
+
11
+ from asyncio import FIRST_COMPLETED, Future, Task, sleep, wait
12
+ from functools import wraps
13
+ from inspect import getfullargspec, isawaitable
14
+ from itertools import filterfalse
15
+ from logging import getLogger
16
+ from weakref import WeakKeyDictionary, proxy
17
+
18
+ from a_sync import exceptions
19
+ from a_sync._typing import *
20
+ from a_sync.a_sync._kwargs import _get_flag_name
21
+ from a_sync.a_sync.base import ASyncGenericBase
22
+ from a_sync.a_sync.function import ASyncFunction
23
+ from a_sync.a_sync.method import (
24
+ ASyncBoundMethod,
25
+ ASyncMethodDescriptor,
26
+ ASyncMethodDescriptorSyncDefault,
27
+ )
28
+ from a_sync.a_sync.property import _ASyncPropertyDescriptorBase
29
+ from a_sync.asyncio import as_completed, create_task, gather
30
+ from a_sync.asyncio.gather import Excluder
31
+ from a_sync.asyncio.sleep import sleep0 as yield_to_loop
32
+ from a_sync.functools import cached_property_unsafe
33
+ from a_sync.iter import ASyncIterator, ASyncGeneratorFunction, ASyncSorter
34
+ from a_sync.primitives.locks import Event
35
+ from a_sync.primitives.queue import Queue, ProcessingQueue
36
+ from a_sync.utils.iterators import as_yielded, exhaust_iterator
37
+ from a_sync.utils.repr import repr_trunc
38
+
39
+
40
+ logger = getLogger(__name__)
41
+
42
+
43
+ MappingFn = Callable[Concatenate[K, P], Awaitable[V]]
44
+
45
+
46
class TaskMapping(DefaultDict[K, "Task[V]"], AsyncIterable[Tuple[K, V]]):
    """
    A mapping of keys to asynchronous tasks with additional functionality.

    `TaskMapping` is a specialized dictionary that maps keys to `asyncio` Tasks. It provides
    convenient methods for creating, managing, and iterating over these tasks asynchronously.

    Tasks are created automatically for each key using a provided function. You cannot manually set items in a `TaskMapping` using dictionary-like syntax.

    Example:
        >>> async def fetch_data(url: str) -> str:
        ...     async with aiohttp.ClientSession() as session:
        ...         async with session.get(url) as response:
        ...             return await response.text()
        ...
        >>> tasks = TaskMapping(fetch_data, ['http://example.com', 'https://www.python.org'], name='url_fetcher', concurrency=5)
        >>> async for key, result in tasks:
        ...     print(f"Data for {key}: {result}")
        ...
        Data for python.org: http://python.org
        Data for example.com: http://example.com

    Note:
        You cannot manually set items in a `TaskMapping` using dictionary-like syntax. Tasks are created and managed internally.

    See Also:
        - :class:`asyncio.Task`
        - :func:`asyncio.create_task`
        - :func:`a_sync.asyncio.create_task`
    """

    concurrency: Optional[int] = None
    "The max number of tasks that will run at one time."

    _destroyed: bool = False
    "Boolean indicating whether this mapping has been consumed and is no longer usable for aggregations."

    _init_loader: Optional["Task[None]"] = None
    "An asyncio Task used to preload values from the iterables."

    _init_loader_next: Optional[Callable[[], Awaitable[Tuple[Tuple[K, "Task[V]"]]]]] = None
    "A coro function that blocks until the _init_loader starts a new task(s), and then returns a `Tuple[Tuple[K, Task[V]]]` with all of the new tasks and the keys that started them."

    _name: Optional[str] = None
    "Optional name for tasks created by this mapping."

    # NOTE: annotated Optional because instances only receive a real Event in __init__.
    _next: Optional[Event] = None
    "An asyncio Event that indicates the next result is ready"

    _wrapped_func_kwargs: Dict[str, Any] = {}
    "Additional keyword arguments passed to `_wrapped_func`."

    __iterables__: Tuple[AnyIterableOrAwaitableIterable[K], ...] = ()
    "The original iterables, if any, used to initialize this mapping."

    __init_loader_coro: Optional[Awaitable[None]] = None
    """An optional asyncio Coroutine to be run by the `_init_loader`"""

    __slots__ = "_wrapped_func", "__wrapped__", "__dict__", "__weakref__"
105
+
106
    # NOTE: maybe since we use so many classvars here we are better off getting rid of slots
    def __init__(
        self,
        wrapped_func: MappingFn[K, P, V] = None,
        *iterables: AnyIterableOrAwaitableIterable[K],
        name: str = "",
        concurrency: Optional[int] = None,
        **wrapped_func_kwargs: P.kwargs,
    ) -> None:
        """
        Initialize a TaskMapping instance.

        Args:
            wrapped_func: A callable that takes a key and additional parameters and returns an Awaitable.
            *iterables: Any number of iterables whose elements will be used as keys for task generation.
            name: An optional name for the tasks created by this mapping.
            concurrency: Maximum number of tasks to run concurrently.
            **wrapped_func_kwargs: Additional keyword arguments to be passed to wrapped_func.

        Example:
            async def process_item(item: int) -> int:
                await asyncio.sleep(1)
                return item * 2

            task_map = TaskMapping(process_item, [1, 2, 3], concurrency=2)
        """

        # Only shadow the classvar defaults when a truthy value was provided,
        # so default instances keep sharing the class-level attributes.
        if concurrency:
            self.concurrency = concurrency

        self.__wrapped__ = wrapped_func
        "The original callable used to initialize this mapping without any modifications."

        if iterables:
            self.__iterables__ = iterables

        # _unwrap strips library wrappers; the raw original stays on __wrapped__ above.
        wrapped_func = _unwrap(wrapped_func)
        self._wrapped_func = wrapped_func
        "The function used to create tasks for each key."

        # Default the a_sync execution flag to async mode unless the caller
        # already passed one of the recognized flags in the kwargs.
        if isinstance(wrapped_func, ASyncMethodDescriptor) and not _get_flag_name(
            wrapped_func_kwargs
        ):
            wrapped_func_kwargs["sync"] = False
        if wrapped_func_kwargs:
            self._wrapped_func_kwargs = wrapped_func_kwargs

        if name:
            self._name = name

        self._next = Event(name=f"{self} `_next`")

        if iterables:

            # Wrap the func so every completion pulses `_next` (see the finally
            # block), which wakes any iterator waiting in __aiter__/map.
            @wraps(wrapped_func)
            async def _wrapped_set_next(
                *args: P.args, __a_sync_recursion: int = 0, **kwargs: P.kwargs
            ) -> V:
                try:
                    return await wrapped_func(*args, **kwargs)
                except exceptions.SyncModeInAsyncContextError as e:
                    # Append context about the wrapped callable to aid debugging.
                    e.args = *e.args, f"wrapped:{self.__wrapped__}"
                    raise
                except TypeError as e:
                    if __a_sync_recursion > 2 or not (
                        str(e).startswith(wrapped_func.__name__)
                        and "got multiple values for argument" in str(e)
                    ):
                        raise
                    # NOTE: args ordering is clashing with provided kwargs. We can handle this in a hacky way.
                    # TODO: perform this check earlier and pre-prepare the args/kwargs ordering
                    new_args = list(args)
                    new_kwargs = dict(kwargs)
                    try:
                        # Move leading kwargs into positional slots until the first
                        # parameter that was not supplied by keyword.
                        for i, arg in enumerate(getfullargspec(self.__wrapped__).args):
                            if arg in kwargs:
                                new_args.insert(i, new_kwargs.pop(arg))
                            else:
                                break
                        return await _wrapped_set_next(
                            *new_args,
                            **new_kwargs,
                            __a_sync_recursion=__a_sync_recursion + 1,
                        )
                    except TypeError as e2:
                        # Prefer the original error when the retry produced the
                        # generic "unsupported callable" failure.
                        raise (
                            e.with_traceback(e.__traceback__)
                            if str(e2) == "unsupported callable"
                            else e2.with_traceback(e2.__traceback__)
                        )
                finally:
                    # Pulse the event: wake all current waiters, then reset it.
                    self._next.set()
                    self._next.clear()

            self._wrapped_func = _wrapped_set_next
            init_loader_queue: Queue[Tuple[K, "Future[V]"]] = Queue()
            # The init loader drains the iterables, starting a task per key and
            # pushing (key, future) pairs onto the queue for _wait_for_next_key.
            self.__init_loader_coro = exhaust_iterator(
                self._start_tasks_for_iterables(*iterables), queue=init_loader_queue
            )
            self._init_loader_next = init_loader_queue.get_all

        try:
            # Touch the cached property to kick off the loader task eagerly.
            self._init_loader
        except _NoRunningLoop:
            # its okay if we get this exception, we can start the task as soon as the loop starts
            pass
212
+
213
+ def __repr__(self) -> str:
214
+ return f"<{type(self).__name__} for {self._wrapped_func} kwargs={self._wrapped_func_kwargs} tasks={len(self)} at {hex(id(self))}>"
215
+
216
+ def __hash__(self) -> int:
217
+ return id(self)
218
+
219
+ def __setitem__(self, item: Any, value: Any) -> None:
220
+ raise NotImplementedError("You cannot manually set items in a TaskMapping")
221
+
222
+ def __getitem__(self, item: K) -> "Task[V]":
223
+ try:
224
+ return dict.__getitem__(self, item)
225
+ except KeyError:
226
+ return self.__start_task(item)
227
+
228
+ def __await__(self) -> Generator[Any, None, Dict[K, V]]:
229
+ """Wait for all tasks to complete and return a dictionary of the results."""
230
+ return self.gather(sync=False).__await__()
231
+
232
    async def __aiter__(self, pop: bool = False) -> AsyncIterator[Tuple[K, V]]:
        # sourcery skip: hoist-loop-from-if, hoist-similar-statement-from-if, hoist-statement-from-if
        """Asynchronously iterate through all key-task pairs, yielding the key-result pair as each task completes."""

        # pop=True marks the mapping consumed; a second consuming iteration raises.
        self._if_pop_check_destroyed(pop)

        # if you inited the TaskMapping with some iterators, we will load those
        yielded = set()
        add_yielded = yielded.add
        try:
            if self._init_loader is None:
                # if you didn't init the TaskMapping with iterators and you didn't start any tasks manually, we should fail
                self._raise_if_empty()
            else:
                # Stream results while the init loader is still producing keys.
                while not self._init_loader.done():
                    await self._wait_for_next_key()
                    while unyielded := tuple(key for key in self if key not in yielded):
                        if ready := tuple(key for key in unyielded if self[key].done()):
                            if pop:
                                self_pop = self.pop
                                for key in ready:
                                    yield key, self_pop(key).result()
                                    add_yielded(key)
                            else:
                                for key in ready:
                                    yield key, self[key].result()
                                    add_yielded(key)
                        else:
                            # Nothing done yet: `_next` is pulsed by the wrapped
                            # func each time a task finishes.
                            await self._next.wait()
                # loader is already done by this point, but we need to check for exceptions
                await self._init_loader
            # if there are any tasks that still need to complete, we need to await them and yield them
            if unyielded := {key: self[key] for key in self if key not in yielded}:
                if pop:
                    self_pop = self.pop
                    async for key, value in as_completed(unyielded, aiter=True):
                        self_pop(key)
                        yield key, value
                else:
                    async for key, value in as_completed(unyielded, aiter=True):
                        yield key, value
        finally:
            # When consuming, cancel/clear everything even if the caller bails early.
            await self._if_pop_clear(pop)
275
+
276
+ def __delitem__(self, item: K) -> None:
277
+ task_or_fut = dict.__getitem__(self, item)
278
+ if not task_or_fut.done():
279
+ task_or_fut.cancel()
280
+ dict.__delitem__(self, item)
281
+
282
+ def keys(self, pop: bool = False) -> "TaskMappingKeys[K, V]":
283
+ return TaskMappingKeys(dict.keys(self), self, pop=pop)
284
+
285
+ def values(self, pop: bool = False) -> "TaskMappingValues[K, V]":
286
+ return TaskMappingValues(dict.values(self), self, pop=pop)
287
+
288
+ def items(self, pop: bool = False) -> "TaskMappingValues[K, V]":
289
+ return TaskMappingItems(dict.items(self), self, pop=pop)
290
+
291
+ async def close(self) -> None:
292
+ await self._if_pop_clear(True)
293
+
294
    @ASyncGeneratorFunction
    async def map(
        self,
        *iterables: AnyIterableOrAwaitableIterable[K],
        pop: bool = True,
        yields: Literal["keys", "both"] = "both",
    ) -> AsyncIterator[Tuple[K, V]]:
        # sourcery skip: hoist-similar-statement-from-if
        """
        Asynchronously map iterables to tasks and yield their results.

        Args:
            *iterables: Iterables to map over.
            pop: Whether to remove tasks from the internal storage once they are completed.
            yields: Whether to yield 'keys', 'values', or 'both' (key-value pairs).

        Yields:
            Depending on `yields`, either keys, values,
            or tuples of key-value pairs representing the results of completed tasks.

        Example:
            async def process_item(item: int) -> int:
                await asyncio.sleep(1)
                return item * 2

            task_map = TaskMapping(process_item)
            async for key, result in task_map.map([1, 2, 3]):
                print(f"Processed {key}: {result}")
        """
        self._if_pop_check_destroyed(pop)

        # make sure the init loader is started if needed
        init_loader = self._init_loader
        # Iterables may come from __init__ OR from this call, never both.
        if iterables and init_loader:
            raise ValueError(
                "You cannot pass `iterables` to map if the TaskMapping was initialized with an (a)iterable."
            )

        try:
            if iterables:
                self._raise_if_not_empty()

                # Each completed task pulses `_next` so we can drain finished
                # results between key batches below.
                def callback(t: Task):
                    self._next.set()

                try:
                    async for k, t in self._tasks_for_iterables(*iterables):
                        t.add_done_callback(callback)
                        if self._next.is_set():
                            # Something finished while we were loading keys; yield it now.
                            async for key, value in self.yield_completed(pop=pop):
                                yield _yield(key, value, yields)
                            self._next.clear()
                except _EmptySequenceError:
                    if len(iterables) > 1:
                        # TODO gotta handle this situation
                        raise exceptions.EmptySequenceError(
                            "bob needs to code something so you can do this, go tell him"
                        ) from None
                    # just pass thru
                finally:
                    self._next.clear()

            elif init_loader:
                # check for exceptions if you passed an iterable(s) into the class init
                await init_loader

            else:
                self._raise_if_empty(
                    "You must either initialize your TaskMapping with an iterable(s) or provide them during your call to map"
                )

            # Drain whatever tasks remain, in completion order.
            if self:
                if pop:
                    self_pop = self.pop
                    async for key, value in as_completed(self, aiter=True):
                        self_pop(key)
                        yield _yield(key, value, yields)
                else:
                    async for key, value in as_completed(self, aiter=True):
                        yield _yield(key, value, yields)
        finally:
            await self._if_pop_clear(pop)
376
+
377
+ @ASyncMethodDescriptorSyncDefault
378
+ async def all(self, pop: bool = True) -> bool:
379
+ try:
380
+ async for key, result in self.__aiter__(pop=pop):
381
+ if not bool(result):
382
+ return False
383
+ return True
384
+ except _EmptySequenceError:
385
+ return True
386
+ finally:
387
+ await self._if_pop_clear(pop)
388
+
389
+ @ASyncMethodDescriptorSyncDefault
390
+ async def any(self, pop: bool = True) -> bool:
391
+ try:
392
+ async for key, result in self.__aiter__(pop=pop):
393
+ if bool(result):
394
+ return True
395
+ return False
396
+ except _EmptySequenceError:
397
+ return False
398
+ finally:
399
+ await self._if_pop_clear(pop)
400
+
401
+ @ASyncMethodDescriptorSyncDefault
402
+ async def max(self, pop: bool = True) -> V:
403
+ # sourcery skip: avoid-builtin-shadow
404
+ max = None
405
+ try:
406
+ async for key, result in self.__aiter__(pop=pop):
407
+ if max is None or result > max:
408
+ max = result
409
+ except _EmptySequenceError:
410
+ raise exceptions.EmptySequenceError("max() arg is an empty sequence") from None
411
+ if max is None:
412
+ raise exceptions.EmptySequenceError("max() arg is an empty sequence") from None
413
+ return max
414
+
415
+ @ASyncMethodDescriptorSyncDefault
416
+ async def min(self, pop: bool = True) -> V:
417
+ # sourcery skip: avoid-builtin-shadow
418
+ """Return the minimum result from the tasks in the mapping."""
419
+ min = None
420
+ try:
421
+ async for key, result in self.__aiter__(pop=pop):
422
+ if min is None or result < min:
423
+ min = result
424
+ except _EmptySequenceError:
425
+ raise exceptions.EmptySequenceError("min() arg is an empty sequence") from None
426
+ if min is None:
427
+ raise exceptions.EmptySequenceError("min() arg is an empty sequence") from None
428
+ return min
429
+
430
+ @ASyncMethodDescriptorSyncDefault
431
+ async def sum(self, pop: bool = False) -> V:
432
+ """Return the sum of the results from the tasks in the mapping."""
433
+ retval = 0
434
+ try:
435
+ async for key, result in self.__aiter__(pop=pop):
436
+ retval += result
437
+ except _EmptySequenceError:
438
+ return 0
439
+ return retval
440
+
441
+ @ASyncIterator.wrap
442
+ async def yield_completed(self, pop: bool = True) -> AsyncIterator[Tuple[K, V]]:
443
+ """
444
+ Asynchronously yield tuples of key-value pairs representing the results of any completed tasks.
445
+
446
+ Args:
447
+ pop: Whether to remove tasks from the internal storage once they are completed.
448
+
449
+ Yields:
450
+ Tuples of key-value pairs representing the results of completed tasks.
451
+
452
+ Example:
453
+ async def process_item(item: int) -> int:
454
+ await asyncio.sleep(1)
455
+ return item * 2
456
+
457
+ task_map = TaskMapping(process_item, [1, 2, 3])
458
+ async for key, result in task_map.yield_completed():
459
+ print(f"Completed {key}: {result}")
460
+ """
461
+ task: Task
462
+ if pop:
463
+ self_pop = self.pop
464
+ for k in tuple(k for k, task in dict.items(self) if task.done()):
465
+ yield k, self_pop(k).result()
466
+ else:
467
+ for k, task in dict.items(self):
468
+ if task.done():
469
+ yield k, task.result()
470
+
471
+ @ASyncMethodDescriptorSyncDefault
472
+ async def gather(
473
+ self,
474
+ return_exceptions: bool = False,
475
+ exclude_if: Excluder[V] = None,
476
+ tqdm: bool = False,
477
+ **tqdm_kwargs: Any,
478
+ ) -> Dict[K, V]:
479
+ """Wait for all tasks to complete and return a dictionary of the results."""
480
+ if self._init_loader:
481
+ await self._init_loader
482
+ self._raise_if_empty()
483
+ return await gather(
484
+ self,
485
+ return_exceptions=return_exceptions,
486
+ exclude_if=exclude_if,
487
+ tqdm=tqdm,
488
+ **tqdm_kwargs,
489
+ )
490
+
491
+ @overload
492
+ def pop(self, item: K, *, cancel: bool = False) -> "Union[Task[V], Future[V]]":
493
+ """Pop a task from the TaskMapping.
494
+
495
+ Args:
496
+ item: The key to pop.
497
+ cancel: Whether to cancel the task when popping it.
498
+ """
499
+
500
+ @overload
501
+ def pop(self, item: K, default: K, *, cancel: bool = False) -> "Union[Task[V], Future[V]]":
502
+ """Pop a task from the TaskMapping.
503
+
504
+ Args:
505
+ item: The key to pop.
506
+ default: The default value to return if no matching key is found.
507
+ cancel: Whether to cancel the task when popping it.
508
+ """
509
+
510
+ def pop(self, *args: K, cancel: bool = False) -> "Union[Task[V], Future[V]]":
511
+ """Pop a task from the TaskMapping.
512
+
513
+ Args:
514
+ *args: One key to pop.
515
+ cancel: Whether to cancel the task when popping it.
516
+ """
517
+ fut_or_task = dict.pop(self, *args)
518
+ if cancel:
519
+ fut_or_task.cancel()
520
+ return fut_or_task
521
+
522
+ def clear(self, cancel: bool = False) -> None:
523
+ """# TODO write docs for this"""
524
+ if cancel and self._init_loader and not self._init_loader.done():
525
+ logger.debug("cancelling %s", self._init_loader)
526
+ self._init_loader.cancel()
527
+ if keys := tuple(self.keys()):
528
+ logger.debug("popping remaining %s tasks", self)
529
+ pop = self.pop
530
+ for k in keys:
531
+ pop(k, cancel=cancel)
532
+
533
    @cached_property_unsafe
    def _init_loader(self) -> Optional["Task[None]"]:
        # sourcery skip: raise-from-previous-error
        """Lazily create (and cache) the Task that preloads keys from `__iterables__`.

        Returns None when the mapping was constructed without iterables.

        Raises:
            _NoRunningLoop: If no event loop is running yet; __init__ tolerates
                this and the loader starts on the next access inside a loop.
        """
        if self.__init_loader_coro is None:
            return None

        logger.debug("starting %s init loader", self)
        # Build a truncated repr of the iterables for the task name only.
        if len(iterables := self.__iterables__) == 1:
            iterables_repr = repr_trunc(iterables[0])
        else:
            iterables_repr = f"({', '.join(map(repr_trunc, iterables))})"
        try:
            task = create_task(
                coro=self.__init_loader_coro,
                name=f"{type(self).__name__} init loader loading {iterables_repr} for {self}",
            )
        except RuntimeError as e:
            # Translate only the "no running event loop" failure; re-raise others.
            raise _NoRunningLoop if str(e) == "no running event loop" else e
        # Drop the coroutine reference once the loader finishes (see __cleanup).
        task.add_done_callback(self.__cleanup)
        return task
553
+
554
+ @cached_property_unsafe
555
+ def _queue(self) -> ProcessingQueue:
556
+ fn = lambda arg: self._wrapped_func(arg, **self._wrapped_func_kwargs)
557
+ return ProcessingQueue(fn, self.concurrency, name=self._name)
558
+
559
+ def _raise_if_empty(self, msg: str = "") -> None:
560
+ if not self:
561
+ raise exceptions.MappingIsEmptyError(self, msg)
562
+
563
+ def _raise_if_not_empty(self) -> None:
564
+ if self:
565
+ raise exceptions.MappingNotEmptyError(self)
566
+
567
    @ASyncGeneratorFunction
    async def _tasks_for_iterables(
        self, *iterables: AnyIterableOrAwaitableIterable[K]
    ) -> AsyncIterator[Tuple[K, "Task[V]"]]:
        """Ensure tasks are running for each key in the provided iterables."""
        # if we have any regular containers we can yield their contents right away
        containers = tuple(
            iterable
            for iterable in iterables
            if not isinstance(iterable, AsyncIterable) and isinstance(iterable, Iterable)
        )
        for iterable in containers:
            async for key in _yield_keys(iterable):
                # Indexing lazily creates + starts the task via __getitem__.
                yield key, self[key]

        # Anything left is async (or awaitable) and must be merged/streamed.
        if remaining := tuple(iterable for iterable in iterables if iterable not in containers):
            try:
                async for key in as_yielded(*(_yield_keys(iterable) for iterable in remaining)):  # type: ignore [attr-defined]
                    yield key, self[key]  # ensure task is running
            except _EmptySequenceError:
                if len(iterables) == 1:
                    raise
                raise RuntimeError("DEV: figure out how to handle this situation") from None

    @ASyncGeneratorFunction
    async def _start_tasks_for_iterables(
        self, *iterables: AnyIterableOrAwaitableIterable[K]
    ) -> AsyncIterator[Tuple[K, "Task[V]"]]:
        """Start new tasks for each key in the provided iterables."""
        # if we have any regular containers we can yield their contents right away
        containers = tuple(
            iterable
            for iterable in iterables
            if not isinstance(iterable, AsyncIterable) and isinstance(iterable, Iterable)
        )
        for iterable in containers:
            async for key in _yield_keys(iterable):
                # Unlike _tasks_for_iterables, this unconditionally starts a new task.
                yield key, self.__start_task(key)

        if remaining := tuple(iterable for iterable in iterables if iterable not in containers):
            try:
                async for key in as_yielded(*(_yield_keys(iterable) for iterable in remaining)):  # type: ignore [attr-defined]
                    yield key, self.__start_task(key)
            except _EmptySequenceError:
                if len(iterables) == 1:
                    raise
                raise RuntimeError("DEV: figure out how to handle this situation") from None
614
+
615
    def _if_pop_check_destroyed(self, pop: bool) -> None:
        """When consuming (``pop=True``), ensure the mapping wasn't already consumed, then mark it so."""
        if pop:
            if self._destroyed:
                raise RuntimeError(f"{self} has already been consumed")
            self._destroyed = True

    async def _if_pop_clear(self, pop: bool) -> None:
        """When consuming (``pop=True``), tear everything down: queue, tasks, and loader."""
        if pop:
            self._destroyed = True
            # _queue is a cached_property, we don't want to create it if it doesn't exist
            if self.concurrency and "_queue" in self.__dict__:
                self._queue.close()
                del self._queue
            self.clear(cancel=True)
            # we need to let the loop run once so the tasks can fully cancel
            await yield_to_loop()
631
+
632
    async def _wait_for_next_key(self) -> None:
        """Block until the init loader has produced at least one new key (or has failed)."""
        # NOTE if `_init_loader` has an exception it will return first, otherwise `_init_loader_next` will return always
        done, pending = await wait(
            (
                create_task(self._init_loader_next(), log_destroy_pending=False),
                self._init_loader,
            ),
            return_when=FIRST_COMPLETED,
        )
        task: Task
        for task in done:
            # check for exceptions
            task.result()
645
+
646
def __start_task(self, item: K) -> "Future[V]":
    """Schedule the wrapped function for ``item`` and store the future under that key."""
    if self.concurrency:
        # NOTE: a queue uses less memory than a Semaphore when there are many, many tasks
        fut = self._queue.put_nowait(item)
    else:
        task_name = f"{item}" if self._name is None else f"{self._name}[{item}]"
        fut = create_task(
            coro=self._wrapped_func(item, **self._wrapped_func_kwargs),
            name=task_name,
        )
    # store directly via dict.__setitem__
    dict.__setitem__(self, item, fut)
    return fut
657
+
658
def __cleanup(self, t: "Task[None]") -> None:
    """Drop the stored init-loader coroutine reference.

    Args:
        t: A completed task; unused here — presumably this is registered as a
           done-callback on the init loader task, confirm at the call site.
    """
    # clear the slot and let the bound Queue die
    del self.__init_loader_coro
661
+
662
+
663
class _NoRunningLoop(Exception):
    """Internal exception; the name suggests it signals that no event loop is
    running — confirm semantics at the raise sites elsewhere in this module."""

    ...
664
+
665
+
666
+ @overload
667
+ def _yield(
668
+ key: K, value: V, yields: Literal["keys"]
669
+ ) -> K: ... # TODO write specific docs for this overload
670
+ @overload
671
+ def _yield(
672
+ key: K, value: V, yields: Literal["both"]
673
+ ) -> Tuple[K, V]: ... # TODO write specific docs for this overload
674
+ def _yield(key: K, value: V, yields: Literal["keys", "both"]) -> Union[K, Tuple[K, V]]:
675
+ """
676
+ Yield either the key, value, or both based on the 'yields' parameter.
677
+
678
+ Args:
679
+ key: The key of the task.
680
+ value: The result of the task.
681
+ yields: Determines what to yield; 'keys' for keys, 'both' for key-value pairs.
682
+
683
+ Returns:
684
+ The key, the value, or a tuple of both based on the 'yields' parameter.
685
+ """
686
+ if yields == "both":
687
+ return key, value
688
+ elif yields == "keys":
689
+ return key
690
+ else:
691
+ raise ValueError(f"`yields` must be 'keys' or 'both'. You passed {yields}")
692
+
693
+
694
class _EmptySequenceError(ValueError):
    """Raised by ``_yield_keys`` when handed a falsy (e.g. empty) iterable."""

    ...
695
+
696
+
697
async def _yield_keys(iterable: AnyIterableOrAwaitableIterable[K]) -> AsyncIterator[K]:
    """
    Asynchronously yield keys from ``iterable``.

    Handles async iterables, plain sync iterables (periodically yielding
    control to the event loop), and awaitables that resolve to either.

    Args:
        iterable: A sync iterable, an async iterable, or an awaitable of one.

    Yields:
        Keys extracted from the iterable.

    Raises:
        _EmptySequenceError: If ``iterable`` is falsy (e.g. an empty container).
        TypeError: If ``iterable`` is neither iterable nor awaitable.
    """
    if not iterable:
        raise _EmptySequenceError(iterable)

    if isinstance(iterable, AsyncIterable):
        async for key in iterable:
            yield key

    elif isinstance(iterable, Iterable):
        for count, key in enumerate(iterable, start=1):
            yield key
            # 5000 is an arbitrary batch size, should be adjusted later
            if count % 5_000 == 0:
                await yield_to_loop()

    elif isawaitable(iterable):
        # resolve the awaitable, then recurse on whatever it produced
        async for key in _yield_keys(await iterable):
            yield key

    else:
        raise TypeError(iterable)
728
+
729
+
730
__unwrapped = WeakKeyDictionary()


def _unwrap(
    wrapped_func: Union[
        AnyFn[P, T], "ASyncMethodDescriptor[P, T]", _ASyncPropertyDescriptorBase[I, T]
    ],
) -> Callable[P, Awaitable[T]]:
    """Resolve (and cache) the underlying awaitable callable for ``wrapped_func``."""
    unwrapped = __unwrapped.get(wrapped_func)
    if unwrapped:
        return unwrapped
    if isinstance(wrapped_func, (ASyncBoundMethod, ASyncMethodDescriptor)):
        unwrapped = wrapped_func
    elif isinstance(wrapped_func, _ASyncPropertyDescriptorBase):
        unwrapped = wrapped_func.get
    elif isinstance(wrapped_func, ASyncFunction):
        # bypassing some wrapper logic speeds things up a bit
        # TODO implement it like this elsewhere if profilers suggest
        if wrapped_func._async_def:
            unwrapped = wrapped_func._modified_fn
        else:
            unwrapped = wrapped_func._asyncified
    else:
        unwrapped = wrapped_func
    __unwrapped[wrapped_func] = unwrapped
    return unwrapped
754
+
755
+
756
+ _get_key: Callable[[Tuple[K, V]], K] = lambda k_and_v: k_and_v[0]
757
+ _get_value: Callable[[Tuple[K, V]], V] = lambda k_and_v: k_and_v[1]
758
+
759
+
760
class _TaskMappingView(ASyncGenericBase, Iterable[T], Generic[T, K, V]):
    """
    Shared plumbing for TaskMapping views (keys / items / values).

    Wraps a plain view object together with a weak proxy back to the owning
    TaskMapping, and supports synchronous iteration as well as awaiting the
    whole view into a list.
    """

    _get_from_item: Callable[[Tuple[K, V]], T]
    _pop: bool = False

    __slots__ = "__view__", "__mapping__"

    def __init__(
        self, view: Iterable[T], task_mapping: TaskMapping[K, V], pop: bool = False
    ) -> None:
        self.__view__ = view
        self.__mapping__: TaskMapping = proxy(task_mapping)
        "actually a weakref.ProxyType[TaskMapping] but then type hints weren't working"
        if pop:
            self._pop = True

    def __iter__(self) -> Iterator[T]:
        return iter(self.__view__)

    def __await__(self) -> Generator[Any, None, List[T]]:
        return self.__await().__await__()

    def __len__(self) -> int:
        return len(self.__view__)

    async def aiterbykeys(self, reverse: bool = False) -> ASyncIterator[T]:
        """Iterate this view's elements, ordered by task key."""
        items = self.__mapping__.items(pop=self._pop)
        project = self._get_from_item
        async for pair in ASyncSorter(items, key=_get_key, reverse=reverse):
            yield project(pair)

    async def aiterbyvalues(self, reverse: bool = False) -> ASyncIterator[T]:
        """Iterate this view's elements, ordered by task value."""
        items = self.__mapping__.items(pop=self._pop)
        project = self._get_from_item
        async for pair in ASyncSorter(items, key=_get_value, reverse=reverse):
            yield project(pair)

    async def __await(self) -> List[T]:
        results: List[T] = []
        for_results = results.append
        async for result in self:
            for_results(result)
        return results
802
+
803
+
804
class TaskMappingKeys(_TaskMappingView[K, K, V], Generic[K, V]):
    """
    Asynchronous view to iterate over the keys of a TaskMapping.
    """

    # a keys view projects each (key, value) item to its key
    _get_from_item = lambda self, item: _get_key(item)

    async def __aiter__(self) -> AsyncIterator[K]:
        # strongref
        mapping = self.__mapping__
        mapping._if_pop_check_destroyed(self._pop)
        # track emitted keys so keys discovered by the init loader while we
        # iterate are not yielded a second time
        yielded = set()
        add_yielded = yielded.add
        for key in self.__load_existing():
            add_yielded(key)
            # there is no chance of duplicate keys here
            yield key
        if mapping._init_loader is None:
            await mapping._if_pop_clear(self._pop)
            return
        async for key in self.__load_init_loader(yielded):
            add_yielded(key)
            yield key
        if self._pop:
            # don't need to check yielded since we've been popping them as we go
            for key in self.__load_existing():
                yield key
            await mapping._if_pop_clear(True)
        else:
            for key in self.__load_existing():
                if key not in yielded:
                    yield key

    def __load_existing(self) -> Iterator[K]:
        """Yield a snapshot of the keys currently in the mapping, popping
        each one first when this is a popping view."""
        # strongref
        mapping = self.__mapping__
        if self._pop:
            pop = mapping.pop
            # tuple() snapshots the keys so popping doesn't mutate the
            # mapping while it is being iterated
            for key in tuple(mapping):
                pop(key)
                yield key
        else:
            yield from tuple(mapping)

    async def __load_init_loader(self, yielded: Set[K]) -> AsyncIterator[K]:
        """Yield keys as the init loader discovers them, skipping any already
        in ``yielded``; waits between batches for the next key to arrive."""
        # sourcery skip: hoist-loop-from-if
        # strongref
        mapping = self.__mapping__
        done = mapping._init_loader.done
        wait_for_next_key = mapping._wait_for_next_key

        if self._pop:
            pop = mapping.pop
            while not done():
                await wait_for_next_key()
                for key in tuple(filterfalse(yielded.__contains__, mapping)):
                    pop(key)
                    yield key
        else:
            while not done():
                await wait_for_next_key()
                for key in tuple(filterfalse(yielded.__contains__, mapping)):
                    yield key
        # check for any exceptions
        await mapping._init_loader
869
+
870
+
871
class TaskMappingItems(_TaskMappingView[Tuple[K, V], K, V], Generic[K, V]):
    """
    Asynchronous view to iterate over the items (key-value pairs) of a TaskMapping.
    """

    def _get_from_item(self, item: Tuple[K, V]) -> Tuple[K, V]:
        # an items view yields the (key, value) pair unchanged
        return item

    async def __aiter__(self) -> AsyncIterator[Tuple[K, V]]:
        # keep a local reference for the duration of iteration
        mapping = self.__mapping__
        mapping._if_pop_check_destroyed(self._pop)
        if self._pop:
            async for key in mapping.keys():
                yield key, await mapping.pop(key)
        else:
            async for key in mapping.keys():
                yield key, await mapping[key]
889
+
890
+
891
class TaskMappingValues(_TaskMappingView[V, K, V], Generic[K, V]):
    """
    Asynchronous view to iterate over the values of a TaskMapping.
    """

    def _get_from_item(self, item: Tuple[K, V]) -> V:
        # a values view projects each (key, value) pair to its value
        return _get_value(item)

    async def __aiter__(self) -> AsyncIterator[V]:
        # keep a local reference for the duration of iteration
        mapping = self.__mapping__
        mapping._if_pop_check_destroyed(self._pop)
        if self._pop:
            async for key in mapping.keys():
                yield await mapping.pop(key)
        else:
            async for key in mapping.keys():
                yield await mapping[key]
909
+
910
+
911
# Names exported when this module is star-imported.
__all__ = [
    "TaskMapping",
    "TaskMappingKeys",
    "TaskMappingValues",
    "TaskMappingItems",
]