ez-a-sync 0.33.4__cp313-cp313-musllinux_1_2_i686.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (177) hide show
  1. a_sync/ENVIRONMENT_VARIABLES.py +42 -0
  2. a_sync/__init__.pxd +2 -0
  3. a_sync/__init__.py +145 -0
  4. a_sync/_smart.c +22830 -0
  5. a_sync/_smart.cpython-313-i386-linux-musl.so +0 -0
  6. a_sync/_smart.pxd +2 -0
  7. a_sync/_smart.pyi +202 -0
  8. a_sync/_smart.pyx +674 -0
  9. a_sync/_typing.py +258 -0
  10. a_sync/a_sync/__init__.py +60 -0
  11. a_sync/a_sync/_descriptor.c +20537 -0
  12. a_sync/a_sync/_descriptor.cpython-313-i386-linux-musl.so +0 -0
  13. a_sync/a_sync/_descriptor.pyi +33 -0
  14. a_sync/a_sync/_descriptor.pyx +422 -0
  15. a_sync/a_sync/_flags.c +6082 -0
  16. a_sync/a_sync/_flags.cpython-313-i386-linux-musl.so +0 -0
  17. a_sync/a_sync/_flags.pxd +3 -0
  18. a_sync/a_sync/_flags.pyx +92 -0
  19. a_sync/a_sync/_helpers.c +14529 -0
  20. a_sync/a_sync/_helpers.cpython-313-i386-linux-musl.so +0 -0
  21. a_sync/a_sync/_helpers.pxd +3 -0
  22. a_sync/a_sync/_helpers.pyi +10 -0
  23. a_sync/a_sync/_helpers.pyx +167 -0
  24. a_sync/a_sync/_kwargs.c +12202 -0
  25. a_sync/a_sync/_kwargs.cpython-313-i386-linux-musl.so +0 -0
  26. a_sync/a_sync/_kwargs.pxd +2 -0
  27. a_sync/a_sync/_kwargs.pyx +64 -0
  28. a_sync/a_sync/_meta.py +210 -0
  29. a_sync/a_sync/abstract.c +12420 -0
  30. a_sync/a_sync/abstract.cpython-313-i386-linux-musl.so +0 -0
  31. a_sync/a_sync/abstract.pyi +141 -0
  32. a_sync/a_sync/abstract.pyx +221 -0
  33. a_sync/a_sync/base.c +14940 -0
  34. a_sync/a_sync/base.cpython-313-i386-linux-musl.so +0 -0
  35. a_sync/a_sync/base.pyi +60 -0
  36. a_sync/a_sync/base.pyx +271 -0
  37. a_sync/a_sync/config.py +168 -0
  38. a_sync/a_sync/decorator.py +651 -0
  39. a_sync/a_sync/flags.c +5272 -0
  40. a_sync/a_sync/flags.cpython-313-i386-linux-musl.so +0 -0
  41. a_sync/a_sync/flags.pxd +72 -0
  42. a_sync/a_sync/flags.pyi +74 -0
  43. a_sync/a_sync/flags.pyx +72 -0
  44. a_sync/a_sync/function.c +37856 -0
  45. a_sync/a_sync/function.cpython-313-i386-linux-musl.so +0 -0
  46. a_sync/a_sync/function.pxd +28 -0
  47. a_sync/a_sync/function.pyi +571 -0
  48. a_sync/a_sync/function.pyx +1381 -0
  49. a_sync/a_sync/method.c +29662 -0
  50. a_sync/a_sync/method.cpython-313-i386-linux-musl.so +0 -0
  51. a_sync/a_sync/method.pxd +9 -0
  52. a_sync/a_sync/method.pyi +523 -0
  53. a_sync/a_sync/method.pyx +1023 -0
  54. a_sync/a_sync/modifiers/__init__.pxd +1 -0
  55. a_sync/a_sync/modifiers/__init__.py +101 -0
  56. a_sync/a_sync/modifiers/cache/__init__.py +160 -0
  57. a_sync/a_sync/modifiers/cache/memory.py +165 -0
  58. a_sync/a_sync/modifiers/limiter.py +132 -0
  59. a_sync/a_sync/modifiers/manager.c +16157 -0
  60. a_sync/a_sync/modifiers/manager.cpython-313-i386-linux-musl.so +0 -0
  61. a_sync/a_sync/modifiers/manager.pxd +5 -0
  62. a_sync/a_sync/modifiers/manager.pyi +219 -0
  63. a_sync/a_sync/modifiers/manager.pyx +299 -0
  64. a_sync/a_sync/modifiers/semaphores.py +173 -0
  65. a_sync/a_sync/property.c +27268 -0
  66. a_sync/a_sync/property.cpython-313-i386-linux-musl.so +0 -0
  67. a_sync/a_sync/property.pyi +376 -0
  68. a_sync/a_sync/property.pyx +819 -0
  69. a_sync/a_sync/singleton.py +63 -0
  70. a_sync/aliases.py +3 -0
  71. a_sync/async_property/__init__.pxd +1 -0
  72. a_sync/async_property/__init__.py +1 -0
  73. a_sync/async_property/cached.c +20397 -0
  74. a_sync/async_property/cached.cpython-313-i386-linux-musl.so +0 -0
  75. a_sync/async_property/cached.pxd +10 -0
  76. a_sync/async_property/cached.pyi +45 -0
  77. a_sync/async_property/cached.pyx +178 -0
  78. a_sync/async_property/proxy.c +34662 -0
  79. a_sync/async_property/proxy.cpython-313-i386-linux-musl.so +0 -0
  80. a_sync/async_property/proxy.pxd +2 -0
  81. a_sync/async_property/proxy.pyi +124 -0
  82. a_sync/async_property/proxy.pyx +474 -0
  83. a_sync/asyncio/__init__.pxd +6 -0
  84. a_sync/asyncio/__init__.py +164 -0
  85. a_sync/asyncio/as_completed.c +18849 -0
  86. a_sync/asyncio/as_completed.cpython-313-i386-linux-musl.so +0 -0
  87. a_sync/asyncio/as_completed.pxd +8 -0
  88. a_sync/asyncio/as_completed.pyi +109 -0
  89. a_sync/asyncio/as_completed.pyx +269 -0
  90. a_sync/asyncio/create_task.c +15912 -0
  91. a_sync/asyncio/create_task.cpython-313-i386-linux-musl.so +0 -0
  92. a_sync/asyncio/create_task.pxd +2 -0
  93. a_sync/asyncio/create_task.pyi +51 -0
  94. a_sync/asyncio/create_task.pyx +271 -0
  95. a_sync/asyncio/gather.c +16687 -0
  96. a_sync/asyncio/gather.cpython-313-i386-linux-musl.so +0 -0
  97. a_sync/asyncio/gather.pyi +107 -0
  98. a_sync/asyncio/gather.pyx +218 -0
  99. a_sync/asyncio/igather.c +13080 -0
  100. a_sync/asyncio/igather.cpython-313-i386-linux-musl.so +0 -0
  101. a_sync/asyncio/igather.pxd +1 -0
  102. a_sync/asyncio/igather.pyi +8 -0
  103. a_sync/asyncio/igather.pyx +183 -0
  104. a_sync/asyncio/sleep.c +9601 -0
  105. a_sync/asyncio/sleep.cpython-313-i386-linux-musl.so +0 -0
  106. a_sync/asyncio/sleep.pyi +14 -0
  107. a_sync/asyncio/sleep.pyx +49 -0
  108. a_sync/debugging.c +15370 -0
  109. a_sync/debugging.cpython-313-i386-linux-musl.so +0 -0
  110. a_sync/debugging.pyi +76 -0
  111. a_sync/debugging.pyx +107 -0
  112. a_sync/exceptions.c +13320 -0
  113. a_sync/exceptions.cpython-313-i386-linux-musl.so +0 -0
  114. a_sync/exceptions.pyi +376 -0
  115. a_sync/exceptions.pyx +446 -0
  116. a_sync/executor.py +619 -0
  117. a_sync/functools.c +12746 -0
  118. a_sync/functools.cpython-313-i386-linux-musl.so +0 -0
  119. a_sync/functools.pxd +7 -0
  120. a_sync/functools.pyi +33 -0
  121. a_sync/functools.pyx +139 -0
  122. a_sync/future.py +1497 -0
  123. a_sync/iter.c +37279 -0
  124. a_sync/iter.cpython-313-i386-linux-musl.so +0 -0
  125. a_sync/iter.pxd +11 -0
  126. a_sync/iter.pyi +370 -0
  127. a_sync/iter.pyx +981 -0
  128. a_sync/primitives/__init__.pxd +1 -0
  129. a_sync/primitives/__init__.py +53 -0
  130. a_sync/primitives/_debug.c +15765 -0
  131. a_sync/primitives/_debug.cpython-313-i386-linux-musl.so +0 -0
  132. a_sync/primitives/_debug.pxd +12 -0
  133. a_sync/primitives/_debug.pyi +52 -0
  134. a_sync/primitives/_debug.pyx +223 -0
  135. a_sync/primitives/_loggable.c +11538 -0
  136. a_sync/primitives/_loggable.cpython-313-i386-linux-musl.so +0 -0
  137. a_sync/primitives/_loggable.pxd +4 -0
  138. a_sync/primitives/_loggable.pyi +66 -0
  139. a_sync/primitives/_loggable.pyx +102 -0
  140. a_sync/primitives/locks/__init__.pxd +8 -0
  141. a_sync/primitives/locks/__init__.py +17 -0
  142. a_sync/primitives/locks/counter.c +17938 -0
  143. a_sync/primitives/locks/counter.cpython-313-i386-linux-musl.so +0 -0
  144. a_sync/primitives/locks/counter.pxd +12 -0
  145. a_sync/primitives/locks/counter.pyi +151 -0
  146. a_sync/primitives/locks/counter.pyx +267 -0
  147. a_sync/primitives/locks/event.c +17072 -0
  148. a_sync/primitives/locks/event.cpython-313-i386-linux-musl.so +0 -0
  149. a_sync/primitives/locks/event.pxd +22 -0
  150. a_sync/primitives/locks/event.pyi +43 -0
  151. a_sync/primitives/locks/event.pyx +185 -0
  152. a_sync/primitives/locks/prio_semaphore.c +25635 -0
  153. a_sync/primitives/locks/prio_semaphore.cpython-313-i386-linux-musl.so +0 -0
  154. a_sync/primitives/locks/prio_semaphore.pxd +25 -0
  155. a_sync/primitives/locks/prio_semaphore.pyi +217 -0
  156. a_sync/primitives/locks/prio_semaphore.pyx +597 -0
  157. a_sync/primitives/locks/semaphore.c +26553 -0
  158. a_sync/primitives/locks/semaphore.cpython-313-i386-linux-musl.so +0 -0
  159. a_sync/primitives/locks/semaphore.pxd +21 -0
  160. a_sync/primitives/locks/semaphore.pyi +197 -0
  161. a_sync/primitives/locks/semaphore.pyx +454 -0
  162. a_sync/primitives/queue.py +1026 -0
  163. a_sync/py.typed +0 -0
  164. a_sync/sphinx/__init__.py +3 -0
  165. a_sync/sphinx/ext.py +289 -0
  166. a_sync/task.py +934 -0
  167. a_sync/utils/__init__.py +105 -0
  168. a_sync/utils/iterators.py +297 -0
  169. a_sync/utils/repr.c +15866 -0
  170. a_sync/utils/repr.cpython-313-i386-linux-musl.so +0 -0
  171. a_sync/utils/repr.pyi +2 -0
  172. a_sync/utils/repr.pyx +73 -0
  173. ez_a_sync-0.33.4.dist-info/METADATA +368 -0
  174. ez_a_sync-0.33.4.dist-info/RECORD +177 -0
  175. ez_a_sync-0.33.4.dist-info/WHEEL +5 -0
  176. ez_a_sync-0.33.4.dist-info/licenses/LICENSE.txt +17 -0
  177. ez_a_sync-0.33.4.dist-info/top_level.txt +1 -0
a_sync/task.py ADDED
@@ -0,0 +1,934 @@
1
+ """
2
+ This module provides asynchronous task management utilities, specifically focused on creating and handling mappings of tasks.
3
+
4
+ The main components include:
5
+ - TaskMapping: A class for managing and asynchronously generating tasks based on input iterables.
6
+ - TaskMappingKeys: A view to asynchronously iterate over the keys of a TaskMapping.
7
+ - TaskMappingValues: A view to asynchronously iterate over the values of a TaskMapping.
8
+ - TaskMappingItems: A view to asynchronously iterate over the items (key-value pairs) of a TaskMapping.
9
+ """
10
+
11
+ from asyncio import FIRST_COMPLETED, Future, Task, sleep, wait
12
+ from functools import wraps
13
+ from inspect import getfullargspec, isawaitable
14
+ from itertools import filterfalse
15
+ from logging import getLogger
16
+ from weakref import WeakKeyDictionary, proxy
17
+
18
+ from a_sync import exceptions
19
+ from a_sync._typing import *
20
+ from a_sync.a_sync._kwargs import _get_flag_name
21
+ from a_sync.a_sync.base import ASyncGenericBase
22
+ from a_sync.a_sync.function import ASyncFunction
23
+ from a_sync.a_sync.method import (
24
+ ASyncBoundMethod,
25
+ ASyncMethodDescriptor,
26
+ ASyncMethodDescriptorSyncDefault,
27
+ )
28
+ from a_sync.a_sync.property import _ASyncPropertyDescriptorBase
29
+ from a_sync.asyncio import as_completed, create_task, gather
30
+ from a_sync.asyncio.gather import Excluder
31
+ from a_sync.asyncio.sleep import sleep0 as yield_to_loop
32
+ from a_sync.functools import cached_property_unsafe
33
+ from a_sync.iter import ASyncIterator, ASyncGeneratorFunction, ASyncSorter
34
+ from a_sync.primitives.locks import Event
35
+ from a_sync.primitives.queue import Queue, ProcessingQueue
36
+ from a_sync.utils.iterators import as_yielded, exhaust_iterator
37
+ from a_sync.utils.repr import repr_trunc
38
+
39
+
40
logger = getLogger(__name__)


# Signature of the callable a TaskMapping wraps: takes a key (plus any extra
# params) and returns an awaitable of the mapped value.
MappingFn = Callable[Concatenate[K, P], Awaitable[V]]


# Weak cache of positional-arg names per wrapped callable, populated lazily by
# the TypeError-recovery path inside `TaskMapping.__init__` (`_wrapped_set_next`).
# Weak keys ensure the cache does not keep wrapped callables alive.
_args = WeakKeyDictionary()
47
+
48
+
49
class TaskMapping(DefaultDict[K, "Task[V]"], AsyncIterable[Tuple[K, V]]):
    """
    A mapping of keys to asynchronous tasks with additional functionality.

    `TaskMapping` is a specialized dictionary that maps keys to `asyncio` Tasks. It provides
    convenient methods for creating, managing, and iterating over these tasks asynchronously.

    Tasks are created automatically for each key using a provided function. You cannot manually set items in a `TaskMapping` using dictionary-like syntax.

    Example:
        >>> async def fetch_data(url: str) -> str:
        ...     async with aiohttp.ClientSession() as session:
        ...         async with session.get(url) as response:
        ...             return await response.text()
        ...
        >>> tasks = TaskMapping(fetch_data, ['http://example.com', 'https://www.python.org'], name='url_fetcher', concurrency=5)
        >>> async for key, result in tasks:
        ...     print(f"Data for {key}: {result}")
        ...
        Data for python.org: http://python.org
        Data for example.com: http://example.com

    Note:
        You cannot manually set items in a `TaskMapping` using dictionary-like syntax. Tasks are created and managed internally.

    See Also:
        - :class:`asyncio.Task`
        - :func:`asyncio.create_task`
        - :func:`a_sync.asyncio.create_task`
    """

    concurrency: Optional[int] = None
    "The max number of tasks that will run at one time."

    _destroyed: bool = False
    "Boolean indicating whether this mapping has been consumed and is no longer usable for aggregations."

    _init_loader: Optional["Task[None]"] = None
    "An asyncio Task used to preload values from the iterables."

    _init_loader_next: Optional[Callable[[], Coroutine[Any, Any, Tuple[Tuple[K, "Task[V]"]]]]] = (
        None
    )
    "A coro function that blocks until the _init_loader starts a new task(s), and then returns a `Tuple[Tuple[K, Task[V]]]` with all of the new tasks and the keys that started them."

    _name: Optional[str] = None
    "Optional name for tasks created by this mapping."

    _next: Event = None
    "An asyncio Event that indicates the next result is ready"

    _wrapped_func_kwargs: Dict[str, Any] = {}
    "Additional keyword arguments passed to `_wrapped_func`."

    __iterables__: Tuple[AnyIterableOrAwaitableIterable[K], ...] = ()
    "The original iterables, if any, used to initialize this mapping."

    __init_loader_coro: Optional[Coroutine[Any, Any, None]] = None
    """An optional asyncio Coroutine to be run by the `_init_loader`"""

    __slots__ = "_wrapped_func", "__wrapped__", "__dict__", "__weakref__"

    # NOTE: maybe since we use so many classvars here we are better off getting rid of slots
112
def __init__(
    self,
    wrapped_func: MappingFn[K, P, V] = None,
    *iterables: AnyIterableOrAwaitableIterable[K],
    name: str = "",
    concurrency: Optional[int] = None,
    **wrapped_func_kwargs: P.kwargs,
) -> None:
    """
    Initialize a TaskMapping instance.

    Args:
        wrapped_func: A callable that takes a key and additional parameters and returns an Awaitable.
        *iterables: Any number of iterables whose elements will be used as keys for task generation.
        name: An optional name for the tasks created by this mapping.
        concurrency: Maximum number of tasks to run concurrently.
        **wrapped_func_kwargs: Additional keyword arguments to be passed to wrapped_func.

    Example:
        async def process_item(item: int) -> int:
            await asyncio.sleep(1)
            return item * 2

        task_map = TaskMapping(process_item, [1, 2, 3], concurrency=2)
    """

    if concurrency:
        self.concurrency = concurrency

    self.__wrapped__ = wrapped_func
    "The original callable used to initialize this mapping without any modifications."

    if iterables:
        self.__iterables__ = iterables

    wrapped_func = _unwrap(wrapped_func)
    self._wrapped_func = wrapped_func
    "The function used to create tasks for each key."

    # a_sync method descriptors need an explicit flag kwarg so tasks run async;
    # only inject it when the caller did not pass a flag themselves
    if isinstance(wrapped_func, ASyncMethodDescriptor) and not _get_flag_name(
        wrapped_func_kwargs
    ):
        wrapped_func_kwargs["sync"] = False
    if wrapped_func_kwargs:
        self._wrapped_func_kwargs = wrapped_func_kwargs

    if name:
        self._name = name

    self._next = Event(name=f"{self} `_next`")

    if iterables:

        # bind once so the wrapper's finally-block avoids attribute lookups per call
        set_next = self._next.set
        clear_next = self._next.clear

        @wraps(wrapped_func)
        async def _wrapped_set_next(
            *args: P.args, __a_sync_recursion: int = 0, **kwargs: P.kwargs
        ) -> V:
            try:
                return await wrapped_func(*args, **kwargs)
            except exceptions.SyncModeInAsyncContextError as e:
                # attach the wrapped callable to the exception args for easier debugging
                e.args = *e.args, f"wrapped:{self.__wrapped__}"
                raise
            except TypeError as e:
                # only retry on the specific "got multiple values for argument"
                # signature clash, and at most 2 recursive repair attempts
                if (
                    args is None
                    or __a_sync_recursion > 2
                    or not (
                        str(e).startswith(wrapped_func.__name__)
                        and "got multiple values for argument" in str(e)
                    )
                ):
                    raise

                # NOTE: args ordering is clashing with provided kwargs. We can handle this in a hacky way.
                # TODO: perform this check earlier and pre-prepare the args/kwargs ordering
                try:
                    argspec = _args[self.__wrapped__]
                except KeyError:
                    # cache the positional-arg names weakly (see module-level `_args`)
                    argspec = _args[self.__wrapped__] = getfullargspec(self.__wrapped__).args

                # move leading kwargs back into their positional slots per the argspec
                new_args = list(args)
                new_kwargs = dict(kwargs)
                try:
                    for i, arg in enumerate(argspec):
                        if arg in kwargs:
                            new_args.insert(i, new_kwargs.pop(arg))
                        else:
                            break
                    return await _wrapped_set_next(
                        *new_args,
                        **new_kwargs,
                        __a_sync_recursion=__a_sync_recursion + 1,
                    )
                except TypeError as e2:
                    raise (
                        e.with_traceback(e.__traceback__)
                        if str(e2) == "unsupported callable"
                        else e2.with_traceback(e2.__traceback__)
                    )
            finally:
                # pulse `_next` so any iterator blocked in `__aiter__`/`map` wakes up
                set_next()
                clear_next()

        self._wrapped_func = _wrapped_set_next
        init_loader_queue: Queue[Tuple[K, "Future[V]"]] = Queue()
        self.__init_loader_coro = exhaust_iterator(
            self._start_tasks_for_iterables(*iterables), queue=init_loader_queue
        )
        self._init_loader_next = init_loader_queue.get_all

        try:
            self._init_loader
        except _NoRunningLoop:
            # its okay if we get this exception, we can start the task as soon as the loop starts
            pass
230
+
231
def __repr__(self) -> str:
    # show the wrapped func, its bound kwargs, and the live task count for debugging
    return f"<{type(self).__name__} for {self._wrapped_func} kwargs={self._wrapped_func_kwargs} tasks={len(self)} at {hex(id(self))}>"
233
+
234
def __hash__(self) -> int:
    # identity hash: the mapping's contents are mutable, so hash by object id
    return id(self)
236
+
237
def __setitem__(self, item: Any, value: Any) -> None:
    # Tasks are created internally via `__getitem__`; direct assignment is
    # forbidden by design (see class docstring).
    raise NotImplementedError("You cannot manually set items in a TaskMapping")
239
+
240
def __getitem__(self, item: K) -> "Task[V]":
    """Return the task for `item`, starting (and caching) a new one if absent."""
    try:
        return dict.__getitem__(self, item)
    except KeyError:
        # lazily start a task (or enqueue the key when `concurrency` is set)
        return self.__start_task(item)
245
+
246
def __await__(self) -> Generator[Any, None, Dict[K, V]]:
    """Wait for all tasks to complete and return a dictionary of the results."""
    # delegate to `gather` in async mode so `await mapping` yields {key: result}
    return self.gather(sync=False).__await__()
249
+
250
async def __aiter__(self, pop: bool = False) -> AsyncIterator[Tuple[K, V]]:
    # sourcery skip: hoist-loop-from-if, hoist-similar-statement-from-if, hoist-statement-from-if
    """Asynchronously iterate through all key-task pairs, yielding the key-result pair as each task completes.

    Args:
        pop: If True, completed tasks are removed from the mapping as they are
            yielded, and the whole mapping is marked consumed afterwards.
    """

    self._if_pop_check_destroyed(pop)

    # if you inited the TaskMapping with some iterators, we will load those
    yielded = set()
    add_yielded = yielded.add
    try:
        if self._init_loader is None:
            # if you didn't init the TaskMapping with iterators and you didn't start any tasks manually, we should fail
            self._raise_if_empty()
        else:
            # drain completed tasks as the init loader keeps producing new keys
            while not self._init_loader.done():
                await self._wait_for_next_key()
                while unyielded := tuple(key for key in self if key not in yielded):
                    if ready := tuple(key for key in unyielded if self[key].done()):
                        if pop:
                            self_pop = self.pop
                            for key in ready:
                                yield key, self_pop(key).result()
                                add_yielded(key)
                        else:
                            for key in ready:
                                yield key, self[key].result()
                                add_yielded(key)
                    else:
                        # no finished task yet; `_wrapped_set_next` pulses this
                        # event each time a task completes
                        await self._next.wait()
            # loader is already done by this point, but we need to check for exceptions
            await self._init_loader
        # if there are any tasks that still need to complete, we need to await them and yield them
        if unyielded := {key: self[key] for key in self if key not in yielded}:
            if pop:
                self_pop = self.pop
                async for key, value in as_completed(unyielded, aiter=True):
                    self_pop(key)
                    yield key, value
            else:
                async for key, value in as_completed(unyielded, aiter=True):
                    yield key, value
    finally:
        await self._if_pop_clear(pop)
293
+
294
def __delitem__(self, item: K) -> None:
    """Remove `item` from the mapping, cancelling its task first if still pending."""
    pending = dict.__getitem__(self, item)
    if not pending.done():
        pending.cancel()
    dict.__delitem__(self, item)
299
+
300
def keys(self, pop: bool = False) -> "TaskMappingKeys[K, V]":
    """Return a view for (a)sync iteration over this mapping's keys.

    Args:
        pop: If True, tasks are consumed (removed) as the view is iterated.
    """
    return TaskMappingKeys(dict.keys(self), self, pop=pop)
302
+
303
def values(self, pop: bool = False) -> "TaskMappingValues[K, V]":
    """Return a view for (a)sync iteration over this mapping's task results.

    Args:
        pop: If True, tasks are consumed (removed) as the view is iterated.
    """
    return TaskMappingValues(dict.values(self), self, pop=pop)
305
+
306
def items(self, pop: bool = False) -> "TaskMappingItems[K, V]":
    """Return a view for (a)sync iteration over this mapping's (key, result) pairs.

    Args:
        pop: If True, tasks are consumed (removed) as the view is iterated.
    """
    # return annotation fixed: this returns TaskMappingItems, not TaskMappingValues
    return TaskMappingItems(dict.items(self), self, pop=pop)
308
+
309
async def close(self) -> None:
    """Tear the mapping down: cancel/clear all tasks and mark it consumed."""
    await self._if_pop_clear(True)
311
+
312
@ASyncGeneratorFunction
async def map(
    self,
    *iterables: AnyIterableOrAwaitableIterable[K],
    pop: bool = True,
    yields: Literal["keys", "both"] = "both",
) -> AsyncIterator[Tuple[K, V]]:
    # sourcery skip: hoist-similar-statement-from-if
    """
    Asynchronously map iterables to tasks and yield their results.

    Args:
        *iterables: Iterables to map over.
        pop: Whether to remove tasks from the internal storage once they are completed.
        yields: Whether to yield 'keys', 'values', or 'both' (key-value pairs).

    Yields:
        Depending on `yields`, either keys, values,
        or tuples of key-value pairs representing the results of completed tasks.

    Raises:
        ValueError: If `iterables` are passed here and the mapping was already
            initialized with iterables.

    Example:
        async def process_item(item: int) -> int:
            await asyncio.sleep(1)
            return item * 2

        task_map = TaskMapping(process_item)
        async for key, result in task_map.map([1, 2, 3]):
            print(f"Processed {key}: {result}")
    """
    self._if_pop_check_destroyed(pop)

    # make sure the init loader is started if needed
    init_loader = self._init_loader
    if iterables and init_loader:
        raise ValueError(
            "You cannot pass `iterables` to map if the TaskMapping was initialized with an (a)iterable."
        )

    try:
        if iterables:
            self._raise_if_not_empty()

            def callback(t: Task):
                # wake the drain step below whenever any task finishes
                self._next.set()

            try:
                async for k, t in self._tasks_for_iterables(*iterables):
                    t.add_done_callback(callback)
                    if self._next.is_set():
                        # drain whatever finished while we were starting tasks
                        async for key, value in self.yield_completed(pop=pop):
                            yield _yield(key, value, yields)
                        self._next.clear()
            except _EmptySequenceError:
                if len(iterables) > 1:
                    # TODO gotta handle this situation
                    raise exceptions.EmptySequenceError(
                        "bob needs to code something so you can do this, go tell him"
                    ) from None
                # just pass thru
            finally:
                self._next.clear()

        elif init_loader:
            # check for exceptions if you passed an iterable(s) into the class init
            await init_loader

        else:
            self._raise_if_empty(
                "You must either initialize your TaskMapping with an iterable(s) or provide them during your call to map"
            )

        # yield the remaining (not yet drained) tasks as they complete
        if self:
            if pop:
                self_pop = self.pop
                async for key, value in as_completed(self, aiter=True):
                    self_pop(key)
                    yield _yield(key, value, yields)
            else:
                async for key, value in as_completed(self, aiter=True):
                    yield _yield(key, value, yields)
    finally:
        await self._if_pop_clear(pop)
394
+
395
@ASyncMethodDescriptorSyncDefault
async def all(self, pop: bool = True) -> bool:
    """Return True iff every task's result is truthy (True for an empty mapping)."""
    try:
        async for _key, result in self.__aiter__(pop=pop):
            if not result:
                return False
    except _EmptySequenceError:
        pass
    finally:
        await self._if_pop_clear(pop)
    return True
406
+
407
@ASyncMethodDescriptorSyncDefault
async def any(self, pop: bool = True) -> bool:
    """Return True iff at least one task's result is truthy (False for an empty mapping)."""
    try:
        async for _key, result in self.__aiter__(pop=pop):
            if result:
                return True
    except _EmptySequenceError:
        pass
    finally:
        await self._if_pop_clear(pop)
    return False
418
+
419
@ASyncMethodDescriptorSyncDefault
async def max(self, pop: bool = True) -> V:
    """Return the maximum result from the tasks in the mapping.

    Args:
        pop: Whether to consume (remove) tasks while iterating.

    Raises:
        exceptions.EmptySequenceError: If the mapping yields no results.
    """
    # `largest` replaces the previous local named `max`, which shadowed the builtin
    largest = None
    try:
        async for key, result in self.__aiter__(pop=pop):
            if largest is None or result > largest:
                largest = result
    except _EmptySequenceError:
        raise exceptions.EmptySequenceError("max() arg is an empty sequence") from None
    if largest is None:
        raise exceptions.EmptySequenceError("max() arg is an empty sequence") from None
    return largest
432
+
433
@ASyncMethodDescriptorSyncDefault
async def min(self, pop: bool = True) -> V:
    """Return the minimum result from the tasks in the mapping.

    Args:
        pop: Whether to consume (remove) tasks while iterating.

    Raises:
        exceptions.EmptySequenceError: If the mapping yields no results.
    """
    # `smallest` replaces the previous local named `min`, which shadowed the builtin
    smallest = None
    try:
        async for key, result in self.__aiter__(pop=pop):
            if smallest is None or result < smallest:
                smallest = result
    except _EmptySequenceError:
        raise exceptions.EmptySequenceError("min() arg is an empty sequence") from None
    if smallest is None:
        raise exceptions.EmptySequenceError("min() arg is an empty sequence") from None
    return smallest
447
+
448
@ASyncMethodDescriptorSyncDefault
async def sum(self, pop: bool = False) -> V:
    """Return the sum of the results from the tasks in the mapping (0 when empty)."""
    total = 0
    try:
        async for _key, result in self.__aiter__(pop=pop):
            total = total + result
    except _EmptySequenceError:
        return 0
    return total
458
+
459
@ASyncIterator.wrap
async def yield_completed(self, pop: bool = True) -> AsyncIterator[Tuple[K, V]]:
    """
    Asynchronously yield tuples of key-value pairs representing the results of any completed tasks.

    Args:
        pop: Whether to remove tasks from the internal storage once they are completed.

    Yields:
        Tuples of key-value pairs representing the results of completed tasks.

    Example:
        async def process_item(item: int) -> int:
            await asyncio.sleep(1)
            return item * 2

        task_map = TaskMapping(process_item, [1, 2, 3])
        async for key, result in task_map.yield_completed():
            print(f"Completed {key}: {result}")
    """
    if pop:
        pop_task = self.pop
        # snapshot the finished keys first so popping doesn't mutate mid-iteration
        finished = [key for key, task in dict.items(self) if task.done()]
        for key in finished:
            yield key, pop_task(key).result()
    else:
        for key, task in dict.items(self):
            if task.done():
                yield key, task.result()
488
+
489
@ASyncMethodDescriptorSyncDefault
async def gather(
    self,
    return_exceptions: bool = False,
    exclude_if: Excluder[V] = None,
    tqdm: bool = False,
    **tqdm_kwargs: Any,
) -> Dict[K, V]:
    """Wait for all tasks to complete and return a dictionary of the results."""
    init_loader = self._init_loader
    if init_loader:
        # surfaces any exception raised while loading keys from the iterables
        await init_loader
    self._raise_if_empty()
    return await gather(
        self,
        return_exceptions=return_exceptions,
        exclude_if=exclude_if,
        tqdm=tqdm,
        **tqdm_kwargs,
    )
508
+
509
@overload
def pop(self, item: K, *, cancel: bool = False) -> "Union[Task[V], Future[V]]":
    """Pop a task from the TaskMapping.

    Args:
        item: The key to pop.
        cancel: Whether to cancel the task when popping it.
    """

@overload
def pop(
    self, item: K, default: "Union[Task[V], Future[V]]", *, cancel: bool = False
) -> "Union[Task[V], Future[V]]":
    """Pop a task from the TaskMapping.

    Args:
        item: The key to pop.
        default: The default value to return if no matching key is found.
            (Annotation fixed: the default is returned in place of a value,
            so it is typed like the values rather than the keys.)
        cancel: Whether to cancel the task when popping it.
    """

def pop(self, *args: K, cancel: bool = False) -> "Union[Task[V], Future[V]]":
    """Pop a task from the TaskMapping.

    Args:
        *args: One key to pop, optionally followed by a default value.
        cancel: Whether to cancel the task when popping it.

    Raises:
        KeyError: If the key is missing and no default was provided.
    """
    fut_or_task = dict.pop(self, *args)
    if cancel:
        fut_or_task.cancel()
    return fut_or_task
539
+
540
def clear(self, cancel: bool = False) -> None:
    """Remove every task from the mapping.

    Args:
        cancel: If True, also cancel the init loader (when still running) and
            each task as it is popped.
    """
    if cancel:
        # NOTE: only touch `_init_loader` under `cancel` — it is a cached
        # property and accessing it may start the loader task
        loader = self._init_loader
        if loader and not loader.done():
            logger.debug("cancelling %s", loader)
            loader.cancel()
    remaining = tuple(self.keys())
    if remaining:
        logger.debug("popping remaining %s tasks", self)
        pop_task = self.pop
        for key in remaining:
            pop_task(key, cancel=cancel)
550
+
551
@cached_property_unsafe
def _init_loader(self) -> Optional["Task[None]"]:
    # sourcery skip: raise-from-previous-error
    """Lazily create (and cache) the task that preloads keys from `__iterables__`.

    Returns:
        None when the mapping was not initialized with iterables; otherwise the
        newly created loader task.

    Raises:
        _NoRunningLoop: When accessed before an event loop is running; callers
            catch this and retry once a loop exists (see `__init__`).
    """
    if self.__init_loader_coro is None:
        return None

    logger.debug("starting %s init loader", self)
    if len(iterables := self.__iterables__) == 1:
        iterables_repr = repr_trunc(iterables[0])
    else:
        iterables_repr = f"({', '.join(map(repr_trunc, iterables))})"
    try:
        task = create_task(
            coro=self.__init_loader_coro,
            name=f"{type(self).__name__} init loader loading {iterables_repr} for {self}",
        )
    except RuntimeError as e:
        # translate "no running event loop" into our internal signal exception
        raise _NoRunningLoop if str(e) == "no running event loop" else e
    # release the coro reference once the loader finishes (see `__cleanup`)
    task.add_done_callback(self.__cleanup)
    return task
571
+
572
@cached_property_unsafe
def _queue(self) -> ProcessingQueue:
    """Lazily build the worker queue used when `concurrency` is set."""

    # read `_wrapped_func`/`_wrapped_func_kwargs` at call time (late binding),
    # matching the lambda this replaces
    def worker(key):
        return self._wrapped_func(key, **self._wrapped_func_kwargs)

    return ProcessingQueue(worker, self.concurrency, name=self._name)
576
+
577
def _raise_if_empty(self, msg: str = "") -> None:
    """Raise `MappingIsEmptyError` (with optional message) if no tasks exist."""
    if not self:
        raise exceptions.MappingIsEmptyError(self, msg)
580
+
581
def _raise_if_not_empty(self) -> None:
    """Raise `MappingNotEmptyError` if any tasks already exist."""
    if self:
        raise exceptions.MappingNotEmptyError(self)
584
+
585
@ASyncGeneratorFunction
async def _tasks_for_iterables(
    self, *iterables: AnyIterableOrAwaitableIterable[K]
) -> AsyncIterator[Tuple[K, "Task[V]"]]:
    """Ensure a task is running for each key in `iterables`, yielding (key, task) pairs."""
    # plain (non-async) containers can be drained immediately
    sync_containers = tuple(
        it for it in iterables if isinstance(it, Iterable) and not isinstance(it, AsyncIterable)
    )
    for container in sync_containers:
        async for key in _yield_keys(container):
            # indexing starts the task if it isn't already running
            yield key, self[key]

    # everything else (async iterables etc.) is merged and consumed concurrently
    if remaining := tuple(it for it in iterables if it not in sync_containers):
        try:
            async for key in as_yielded(*map(_yield_keys, remaining)):  # type: ignore [attr-defined]
                yield key, self[key]
        except _EmptySequenceError:
            if len(iterables) == 1:
                raise
            raise RuntimeError("DEV: figure out how to handle this situation") from None
608
+
609
@ASyncGeneratorFunction
async def _start_tasks_for_iterables(
    self, *iterables: AnyIterableOrAwaitableIterable[K]
) -> AsyncIterator[Tuple[K, "Task[V]"]]:
    """Start a fresh task for each key in `iterables`, yielding (key, task) pairs."""
    # plain (non-async) containers can be drained immediately
    sync_containers = tuple(
        it for it in iterables if isinstance(it, Iterable) and not isinstance(it, AsyncIterable)
    )
    for container in sync_containers:
        async for key in _yield_keys(container):
            yield key, self.__start_task(key)

    # everything else (async iterables etc.) is merged and consumed concurrently
    if remaining := tuple(it for it in iterables if it not in sync_containers):
        try:
            async for key in as_yielded(*map(_yield_keys, remaining)):  # type: ignore [attr-defined]
                yield key, self.__start_task(key)
        except _EmptySequenceError:
            if len(iterables) == 1:
                raise
            raise RuntimeError("DEV: figure out how to handle this situation") from None
632
+
633
def _if_pop_check_destroyed(self, pop: bool) -> None:
    """When consuming (`pop=True`), ensure the mapping wasn't already consumed and mark it so."""
    if not pop:
        return
    if self._destroyed:
        raise RuntimeError(f"{self} has already been consumed")
    self._destroyed = True
638
+
639
async def _if_pop_clear(self, pop: bool) -> None:
    """When consuming (`pop=True`), tear the mapping down: close the worker
    queue (if it was ever created), cancel & drop every task, and yield to the
    loop so the cancellations can propagate."""
    if not pop:
        return
    self._destroyed = True
    # `_queue` is a cached_property; only touch it if it actually exists
    if self.concurrency and "_queue" in self.__dict__:
        self._queue.close()
        del self._queue
    self.clear(cancel=True)
    # let the event loop run once so the cancelled tasks can finish cancelling
    await yield_to_loop()
649
+
650
+ async def _wait_for_next_key(self) -> None:
651
+ # NOTE if `_init_loader` has an exception it will return first, otherwise `_init_loader_next` will return always
652
+ done, pending = await wait(
653
+ (
654
+ create_task(self._init_loader_next(), log_destroy_pending=False),
655
+ self._init_loader,
656
+ ),
657
+ return_when=FIRST_COMPLETED,
658
+ )
659
+ task: Task
660
+ for task in done:
661
+ # check for exceptions
662
+ task.result()
663
+
664
+ def __start_task(self, item: K) -> "Future[V]":
665
+ if self.concurrency:
666
+ # NOTE: we use a queue instead of a Semaphore to reduce memory use for use cases involving many many tasks
667
+ fut = self._queue.put_nowait(item)
668
+ else:
669
+ fut = create_task(
670
+ coro=self._wrapped_func(item, **self._wrapped_func_kwargs),
671
+ name=f"{item}" if self._name is None else f"{self._name}[{item}]",
672
+ )
673
+ dict.__setitem__(self, item, fut)
674
+ return fut
675
+
676
    def __cleanup(self, t: "Task[None]") -> None:
        """Done-callback that drops the reference to the init-loader coroutine.

        Args:
            t: The completed task (unused; required by the done-callback signature).
        """
        # clear the slot and let the bound Queue die
        del self.__init_loader_coro
679
+
680
+
681
+ class _NoRunningLoop(Exception): ...
682
+
683
+
684
+ @overload
685
+ def _yield(
686
+ key: K, value: V, yields: Literal["keys"]
687
+ ) -> K: ... # TODO write specific docs for this overload
688
+ @overload
689
+ def _yield(
690
+ key: K, value: V, yields: Literal["both"]
691
+ ) -> Tuple[K, V]: ... # TODO write specific docs for this overload
692
+ def _yield(key: K, value: V, yields: Literal["keys", "both"]) -> Union[K, Tuple[K, V]]:
693
+ """
694
+ Yield either the key, value, or both based on the 'yields' parameter.
695
+
696
+ Args:
697
+ key: The key of the task.
698
+ value: The result of the task.
699
+ yields: Determines what to yield; 'keys' for keys, 'both' for key-value pairs.
700
+
701
+ Returns:
702
+ The key, the value, or a tuple of both based on the 'yields' parameter.
703
+ """
704
+ if yields == "both":
705
+ return key, value
706
+ elif yields == "keys":
707
+ return key
708
+ else:
709
+ raise ValueError(f"`yields` must be 'keys' or 'both'. You passed {yields}")
710
+
711
+
712
+ class _EmptySequenceError(ValueError): ...
713
+
714
+
715
async def _yield_keys(iterable: AnyIterableOrAwaitableIterable[K]) -> AsyncIterator[K]:
    """
    Asynchronously yield keys from the provided iterable.

    Args:
        iterable: A synchronous iterable, an asynchronous iterable, or an
            awaitable resolving to either.

    Raises:
        _EmptySequenceError: If ``iterable`` is falsy (e.g. an empty sequence).
        TypeError: If ``iterable`` is neither iterable nor awaitable.

    Yields:
        Keys extracted from the iterable.
    """
    if not iterable:
        raise _EmptySequenceError(iterable)

    if isinstance(iterable, AsyncIterable):
        # async iterables are checked before sync ones; an object may implement both
        async for key in iterable:
            yield key

    elif isinstance(iterable, Iterable):
        for count, key in enumerate(iterable, start=1):
            yield key
            # arbitrary number, should be adjusted later
            if count % 5_000 == 0:
                await yield_to_loop()

    elif isawaitable(iterable):
        # resolve the awaitable, then recurse on whatever it produced
        async for key in _yield_keys(await iterable):
            yield key

    else:
        raise TypeError(iterable)
746
+
747
+
748
+ __unwrapped = WeakKeyDictionary()
749
+
750
+
751
def _unwrap(
    wrapped_func: Union[
        AnyFn[P, T], "ASyncMethodDescriptor[P, T]", _ASyncPropertyDescriptorBase[I, T]
    ],
) -> Callable[P, Awaitable[T]]:
    """Resolve ``wrapped_func`` to an awaitable-returning callable, with caching.

    Results are memoized in the module-level weak-key cache so repeated
    unwrapping of the same callable is O(1).
    """
    cached = __unwrapped.get(wrapped_func)
    if cached:
        return cached
    if isinstance(wrapped_func, (ASyncBoundMethod, ASyncMethodDescriptor)):
        unwrapped = wrapped_func
    elif isinstance(wrapped_func, _ASyncPropertyDescriptorBase):
        unwrapped = wrapped_func.get
    elif isinstance(wrapped_func, ASyncFunction):
        # this speeds things up a bit by bypassing some logic
        # TODO implement it like this elsewhere if profilers suggest
        if wrapped_func.is_async_def():
            unwrapped = wrapped_func._modified_fn
        else:
            unwrapped = wrapped_func._asyncified
    else:
        unwrapped = wrapped_func
    __unwrapped[wrapped_func] = unwrapped
    return unwrapped
772
+
773
+
774
+ _get_key: Callable[[Tuple[K, V]], K] = lambda k_and_v: k_and_v[0]
775
+ _get_value: Callable[[Tuple[K, V]], V] = lambda k_and_v: k_and_v[1]
776
+
777
+
778
class _TaskMappingView(ASyncGenericBase, Iterable[T], Generic[T, K, V]):
    """
    Base class for TaskMapping views that provides common functionality.

    Holds only a weak proxy to the owning TaskMapping so the view does not
    keep the mapping alive.
    """

    # Set by subclasses: extracts the view's element type from a (key, value) item.
    _get_from_item: Callable[[Tuple[K, V]], T]
    # When True, iterating the view consumes (pops) entries from the mapping.
    _pop: bool = False

    __slots__ = "__view__", "__mapping__"

    def __init__(
        self, view: Iterable[T], task_mapping: TaskMapping[K, V], pop: bool = False
    ) -> None:
        """Wrap ``view`` and keep a weak proxy to ``task_mapping``.

        Args:
            view: The underlying synchronous view to iterate.
            task_mapping: The owning TaskMapping; stored as a weak proxy.
            pop: If True, iteration pops entries from the mapping.
        """
        self.__view__ = view
        self.__mapping__: TaskMapping = proxy(task_mapping)
        "actually a weakref.ProxyType[TaskMapping] but then type hints weren't working"
        if pop:
            # only shadow the class-level default (False) when popping is requested
            self._pop = True

    def __iter__(self) -> Iterator[T]:
        """Iterate the underlying synchronous view."""
        return iter(self.__view__)

    def __await__(self) -> Generator[Any, None, List[T]]:
        """Allow awaiting the view to collect all async results into a list."""
        return self.__await().__await__()

    def __len__(self) -> int:
        """Return the number of entries in the underlying view."""
        return len(self.__view__)

    async def aiterbykeys(self, reverse: bool = False) -> ASyncIterator[T]:
        """Yield the view's elements ordered by the mapping's keys.

        Args:
            reverse: If True, sort keys in descending order.
        """
        async for tup in ASyncSorter(
            self.__mapping__.items(pop=self._pop), key=_get_key, reverse=reverse
        ):
            yield self._get_from_item(tup)

    async def aiterbyvalues(self, reverse: bool = False) -> ASyncIterator[T]:
        """Yield the view's elements ordered by the mapping's values.

        Args:
            reverse: If True, sort values in descending order.
        """
        async for tup in ASyncSorter(
            self.__mapping__.items(pop=self._pop), key=_get_value, reverse=reverse
        ):
            yield self._get_from_item(tup)

    async def __await(self) -> List[T]:
        """Async-iterate self and collect every produced element into a list."""
        return [result async for result in self]
820
+
821
+
822
class TaskMappingKeys(_TaskMappingView[K, K, V], Generic[K, V]):
    """
    Asynchronous view to iterate over the keys of a TaskMapping.
    """

    def _get_from_item(self, item: Tuple[K, V]) -> K:
        """Extract the key from a ``(key, value)`` item."""
        # a def instead of an assigned lambda, per PEP 8 (flake8 E731)
        return _get_key(item)

    async def __aiter__(self) -> AsyncIterator[K]:
        """Yield the mapping's keys, deduplicating across loading phases."""
        # strongref
        mapping = self.__mapping__
        mapping._if_pop_check_destroyed(self._pop)
        yielded = set()
        add_yielded = yielded.add
        for key in self.__load_existing():
            add_yielded(key)
            # there is no chance of duplicate keys here
            yield key
        if mapping._init_loader is None:
            await mapping._if_pop_clear(self._pop)
            return
        async for key in self.__load_init_loader(yielded):
            add_yielded(key)
            yield key
        if self._pop:
            # don't need to check yielded since we've been popping them as we go
            for key in self.__load_existing():
                yield key
            await mapping._if_pop_clear(True)
        else:
            for key in self.__load_existing():
                if key not in yielded:
                    yield key

    def __load_existing(self) -> Iterator[K]:
        """Yield the keys currently in the mapping, popping them when ``_pop``."""
        # strongref
        mapping = self.__mapping__
        if self._pop:
            pop = mapping.pop
            for key in tuple(mapping):
                pop(key)
                yield key
        else:
            yield from tuple(mapping)

    async def __load_init_loader(self, yielded: Set[K]) -> AsyncIterator[K]:
        """Yield keys produced by the init loader that are not in ``yielded``."""
        # sourcery skip: hoist-loop-from-if
        # strongref
        mapping = self.__mapping__
        done = mapping._init_loader.done
        wait_for_next_key = mapping._wait_for_next_key

        if self._pop:
            pop = mapping.pop
            while not done():
                await wait_for_next_key()
                for key in tuple(filterfalse(yielded.__contains__, mapping)):
                    pop(key)
                    yield key
        else:
            while not done():
                await wait_for_next_key()
                for key in tuple(filterfalse(yielded.__contains__, mapping)):
                    yield key
        # check for any exceptions
        await mapping._init_loader
887
+
888
+
889
class TaskMappingItems(_TaskMappingView[Tuple[K, V], K, V], Generic[K, V]):
    """
    Asynchronous view to iterate over the items (key-value pairs) of a TaskMapping.
    """

    def _get_from_item(self, item: Tuple[K, V]) -> Tuple[K, V]:
        """Return the ``(key, value)`` item unchanged."""
        # a def instead of an assigned lambda, per PEP 8 (flake8 E731)
        return item

    async def __aiter__(self) -> AsyncIterator[Tuple[K, V]]:
        """Yield ``(key, value)`` pairs, awaiting each task's result."""
        # strongref
        mapping = self.__mapping__
        mapping._if_pop_check_destroyed(self._pop)
        if self._pop:
            pop = mapping.pop
            async for key in mapping.keys():
                yield key, await pop(key)
        else:
            async for key in mapping.keys():
                yield key, await mapping[key]
907
+
908
+
909
class TaskMappingValues(_TaskMappingView[V, K, V], Generic[K, V]):
    """
    Asynchronous view to iterate over the values of a TaskMapping.
    """

    def _get_from_item(self, item: Tuple[K, V]) -> V:
        """Extract the value from a ``(key, value)`` item."""
        # a def instead of an assigned lambda, per PEP 8 (flake8 E731)
        return _get_value(item)

    async def __aiter__(self) -> AsyncIterator[V]:
        """Yield each task's awaited value."""
        # strongref
        mapping = self.__mapping__
        mapping._if_pop_check_destroyed(self._pop)
        if self._pop:
            pop = mapping.pop
            async for key in mapping.keys():
                yield await pop(key)
        else:
            async for key in mapping.keys():
                yield await mapping[key]
927
+
928
+
929
# Public API of this module.
__all__ = [
    "TaskMapping",
    "TaskMappingKeys",
    "TaskMappingValues",
    "TaskMappingItems",
]