ez-a-sync 0.22.14__py3-none-any.whl → 0.22.16__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (73)
  1. a_sync/ENVIRONMENT_VARIABLES.py +37 -5
  2. a_sync/__init__.py +53 -12
  3. a_sync/_smart.py +231 -28
  4. a_sync/_typing.py +112 -15
  5. a_sync/a_sync/__init__.py +35 -10
  6. a_sync/a_sync/_descriptor.py +248 -38
  7. a_sync/a_sync/_flags.py +78 -9
  8. a_sync/a_sync/_helpers.py +46 -13
  9. a_sync/a_sync/_kwargs.py +33 -8
  10. a_sync/a_sync/_meta.py +149 -28
  11. a_sync/a_sync/abstract.py +150 -28
  12. a_sync/a_sync/base.py +34 -16
  13. a_sync/a_sync/config.py +85 -14
  14. a_sync/a_sync/decorator.py +441 -139
  15. a_sync/a_sync/function.py +709 -147
  16. a_sync/a_sync/method.py +437 -110
  17. a_sync/a_sync/modifiers/__init__.py +85 -5
  18. a_sync/a_sync/modifiers/cache/__init__.py +116 -17
  19. a_sync/a_sync/modifiers/cache/memory.py +130 -20
  20. a_sync/a_sync/modifiers/limiter.py +101 -22
  21. a_sync/a_sync/modifiers/manager.py +142 -16
  22. a_sync/a_sync/modifiers/semaphores.py +121 -15
  23. a_sync/a_sync/property.py +383 -82
  24. a_sync/a_sync/singleton.py +44 -19
  25. a_sync/aliases.py +0 -1
  26. a_sync/asyncio/__init__.py +140 -1
  27. a_sync/asyncio/as_completed.py +213 -79
  28. a_sync/asyncio/create_task.py +70 -20
  29. a_sync/asyncio/gather.py +125 -58
  30. a_sync/asyncio/utils.py +3 -3
  31. a_sync/exceptions.py +248 -26
  32. a_sync/executor.py +164 -69
  33. a_sync/future.py +1227 -168
  34. a_sync/iter.py +173 -56
  35. a_sync/primitives/__init__.py +14 -2
  36. a_sync/primitives/_debug.py +72 -18
  37. a_sync/primitives/_loggable.py +41 -10
  38. a_sync/primitives/locks/__init__.py +5 -2
  39. a_sync/primitives/locks/counter.py +107 -38
  40. a_sync/primitives/locks/event.py +21 -7
  41. a_sync/primitives/locks/prio_semaphore.py +262 -63
  42. a_sync/primitives/locks/semaphore.py +138 -89
  43. a_sync/primitives/queue.py +601 -60
  44. a_sync/sphinx/__init__.py +0 -1
  45. a_sync/sphinx/ext.py +160 -50
  46. a_sync/task.py +313 -112
  47. a_sync/utils/__init__.py +12 -6
  48. a_sync/utils/iterators.py +170 -50
  49. {ez_a_sync-0.22.14.dist-info → ez_a_sync-0.22.16.dist-info}/METADATA +1 -1
  50. ez_a_sync-0.22.16.dist-info/RECORD +74 -0
  51. {ez_a_sync-0.22.14.dist-info → ez_a_sync-0.22.16.dist-info}/WHEEL +1 -1
  52. tests/conftest.py +1 -2
  53. tests/executor.py +250 -9
  54. tests/fixtures.py +61 -32
  55. tests/test_abstract.py +22 -4
  56. tests/test_as_completed.py +54 -21
  57. tests/test_base.py +264 -19
  58. tests/test_cache.py +31 -15
  59. tests/test_decorator.py +54 -28
  60. tests/test_executor.py +31 -13
  61. tests/test_future.py +45 -8
  62. tests/test_gather.py +8 -2
  63. tests/test_helpers.py +2 -0
  64. tests/test_iter.py +55 -13
  65. tests/test_limiter.py +5 -3
  66. tests/test_meta.py +23 -9
  67. tests/test_modified.py +4 -1
  68. tests/test_semaphore.py +15 -8
  69. tests/test_singleton.py +28 -11
  70. tests/test_task.py +162 -36
  71. ez_a_sync-0.22.14.dist-info/RECORD +0 -74
  72. {ez_a_sync-0.22.14.dist-info → ez_a_sync-0.22.16.dist-info}/LICENSE.txt +0 -0
  73. {ez_a_sync-0.22.14.dist-info → ez_a_sync-0.22.16.dist-info}/top_level.txt +0 -0
a_sync/task.py CHANGED
@@ -1,3 +1,12 @@
+"""
+This module provides asynchronous task management utilities, specifically focused on creating and handling mappings of tasks.
+
+The main components include:
+- TaskMapping: A class for managing and asynchronously generating tasks based on input iterables.
+- TaskMappingKeys: A view to asynchronously iterate over the keys of a TaskMapping.
+- TaskMappingValues: A view to asynchronously iterate over the values of a TaskMapping.
+- TaskMappingItems: A view to asynchronously iterate over the items (key-value pairs) of a TaskMapping.
+"""
 
 import asyncio
 import contextlib
@@ -12,7 +21,11 @@ from a_sync._typing import *
 from a_sync.a_sync import _kwargs
 from a_sync.a_sync.base import ASyncGenericBase
 from a_sync.a_sync.function import ASyncFunction
-from a_sync.a_sync.method import ASyncBoundMethod, ASyncMethodDescriptor, ASyncMethodDescriptorSyncDefault
+from a_sync.a_sync.method import (
+    ASyncBoundMethod,
+    ASyncMethodDescriptor,
+    ASyncMethodDescriptorSyncDefault,
+)
 from a_sync.a_sync.property import _ASyncPropertyDescriptorBase
 from a_sync.asyncio.as_completed import as_completed
 from a_sync.asyncio.gather import Excluder, gather
@@ -25,30 +38,39 @@ from a_sync.utils.iterators import as_yielded, exhaust_iterator
 logger = logging.getLogger(__name__)
 
 
-
 MappingFn = Callable[Concatenate[K, P], Awaitable[V]]
 
+
 class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]]):
     """
     A mapping of keys to asynchronous tasks with additional functionality.
 
-    TaskMapping is a specialized dictionary that maps keys to asyncio Tasks. It provides
+    `TaskMapping` is a specialized dictionary that maps keys to `asyncio` Tasks. It provides
     convenient methods for creating, managing, and iterating over these tasks asynchronously.
 
-    Example:
-        async def fetch_data(url: str) -> str:
-            async with aiohttp.ClientSession() as session:
-                async with session.get(url) as response:
-                    return await response.text()
+    Tasks are created automatically for each key using a provided function. You cannot manually set items in a `TaskMapping` using dictionary-like syntax.
 
-        tasks = TaskMapping(fetch_data, name='url_fetcher', concurrency=5)
-        tasks['example.com'] = 'http://example.com'
-        tasks['python.org'] = 'https://www.python.org'
-
-        async for key, result in tasks:
-            print(f"Data for {key}: {result}")
+    Example:
+        >>> async def fetch_data(url: str) -> str:
+        ...     async with aiohttp.ClientSession() as session:
+        ...         async with session.get(url) as response:
+        ...             return await response.text()
+        ...
+        >>> tasks = TaskMapping(fetch_data, ['http://example.com', 'https://www.python.org'], name='url_fetcher', concurrency=5)
+        >>> async for key, result in tasks:
+        ...     print(f"Data for {key}: {result}")
+        ...
+        Data for python.org: http://python.org
+        Data for example.com: http://example.com
+
+    Note:
+        You cannot manually set items in a `TaskMapping` using dictionary-like syntax. Tasks are created and managed internally.
+
+    See Also:
+        - :class:`asyncio.Task`
+        - :func:`asyncio.create_task`
     """
-
+
     concurrency: Optional[int] = None
     "The max number of tasks that will run at one time."
 
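The doctest added to the class docstring above depends on aiohttp and live URLs. As a minimal, self-contained sketch of the same pattern (the `double` coroutine and the integer keys below are illustrative stand-ins, not part of the package; only constructor arguments shown in this diff are used):

    import asyncio

    from a_sync.task import TaskMapping


    async def double(i: int) -> int:
        # stand-in for real async work such as an HTTP request
        await asyncio.sleep(0)
        return i * 2


    async def main() -> None:
        # one task is created per key; concurrency caps how many run at once
        mapping = TaskMapping(double, [1, 2, 3], name="doubler", concurrency=2)
        async for key, result in mapping:
            print(key, result)  # pairs arrive as each task completes


    asyncio.run(main())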
@@ -57,8 +79,10 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
 
     _init_loader: Optional["asyncio.Task[None]"] = None
     "An asyncio Task used to preload values from the iterables."
-
-    _init_loader_next: Optional[Callable[[], Awaitable[Tuple[Tuple[K, "asyncio.Task[V]"]]]]] = None
+
+    _init_loader_next: Optional[
+        Callable[[], Awaitable[Tuple[Tuple[K, "asyncio.Task[V]"]]]]
+    ] = None
     "A coro function that blocks until the _init_loader starts a new task(s), and then returns a `Tuple[Tuple[K, asyncio.Task[V]]]` with all of the new tasks and the keys that started them."
 
     _name: Optional[str] = None
@@ -71,37 +95,45 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
     "Additional keyword arguments passed to `_wrapped_func`."
 
     __iterables__: Tuple[AnyIterableOrAwaitableIterable[K], ...] = ()
-    "The original iterables, if any, used to initialize this mapping."""
-
+    "The original iterables, if any, used to initialize this mapping."
+
     __init_loader_coro: Optional[Awaitable[None]] = None
     """An optional asyncio Coroutine to be run by the `_init_loader`"""
 
     __slots__ = "_wrapped_func", "__wrapped__", "__dict__", "__weakref__"
+
     # NOTE: maybe since we use so many classvars here we are better off getting rid of slots
     def __init__(
-        self,
-        wrapped_func: MappingFn[K, P, V] = None,
-        *iterables: AnyIterableOrAwaitableIterable[K],
-        name: str = '',
-        concurrency: Optional[int] = None,
+        self,
+        wrapped_func: MappingFn[K, P, V] = None,
+        *iterables: AnyIterableOrAwaitableIterable[K],
+        name: str = "",
+        concurrency: Optional[int] = None,
         **wrapped_func_kwargs: P.kwargs,
     ) -> None:
         """
         Initialize a TaskMapping instance.
 
         Args:
-            wrapped_func: A function that takes a key (and optional parameters) and returns an Awaitable.
+            wrapped_func: A callable that takes a key and additional parameters and returns an Awaitable.
             *iterables: Any number of iterables whose elements will be used as keys for task generation.
             name: An optional name for the tasks created by this mapping.
            concurrency: Maximum number of tasks to run concurrently.
            **wrapped_func_kwargs: Additional keyword arguments to be passed to wrapped_func.
+
+        Example:
+            async def process_item(item: int) -> int:
+                await asyncio.sleep(1)
+                return item * 2
+
+            task_map = TaskMapping(process_item, [1, 2, 3], concurrency=2)
         """
 
         if concurrency:
             self.concurrency = concurrency
 
         self.__wrapped__ = wrapped_func
-        "The original callable used to initialize this mapping without any modifications."""
+        "The original callable used to initialize this mapping without any modifications."
 
         if iterables:
             self.__iterables__ = iterables
@@ -110,9 +142,11 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
         self._wrapped_func = wrapped_func
         "The function used to create tasks for each key."
 
-        if isinstance(wrapped_func, ASyncMethodDescriptor):
-            if _kwargs.get_flag_name(wrapped_func_kwargs) is None:
-                wrapped_func_kwargs["sync"] = False
+        if (
+            isinstance(wrapped_func, ASyncMethodDescriptor)
+            and _kwargs.get_flag_name(wrapped_func_kwargs) is None
+        ):
+            wrapped_func_kwargs["sync"] = False
         if wrapped_func_kwargs:
             self._wrapped_func_kwargs = wrapped_func_kwargs
 
@@ -121,48 +155,67 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
 
         if iterables:
             self._next = Event(name=f"{self} `_next`")
+
             @functools.wraps(wrapped_func)
-            async def _wrapped_set_next(*args: P.args, __a_sync_recursion: int = 0, **kwargs: P.kwargs) -> V:
+            async def _wrapped_set_next(
+                *args: P.args, __a_sync_recursion: int = 0, **kwargs: P.kwargs
+            ) -> V:
                 try:
                     return await wrapped_func(*args, **kwargs)
                 except exceptions.SyncModeInAsyncContextError as e:
                     raise Exception(e, self.__wrapped__)
                 except TypeError as e:
-                    if __a_sync_recursion > 2 or not (str(e).startswith(wrapped_func.__name__) and "got multiple values for argument" in str(e)):
+                    if __a_sync_recursion > 2 or not (
+                        str(e).startswith(wrapped_func.__name__)
+                        and "got multiple values for argument" in str(e)
+                    ):
                         raise
                     # NOTE: args ordering is clashing with provided kwargs. We can handle this in a hacky way.
                     # TODO: perform this check earlier and pre-prepare the args/kwargs ordering
                     new_args = list(args)
                     new_kwargs = dict(kwargs)
                     try:
-                        for i, arg in enumerate(inspect.getfullargspec(self.__wrapped__).args):
+                        for i, arg in enumerate(
+                            inspect.getfullargspec(self.__wrapped__).args
+                        ):
                             if arg in kwargs:
                                 new_args.insert(i, new_kwargs.pop(arg))
                             else:
                                 break
-                        return await _wrapped_set_next(*new_args, **new_kwargs, __a_sync_recursion=__a_sync_recursion+1)
+                        return await _wrapped_set_next(
+                            *new_args,
+                            **new_kwargs,
+                            __a_sync_recursion=__a_sync_recursion + 1,
+                        )
                     except TypeError as e2:
-                        raise e.with_traceback(e.__traceback__) if str(e2) == "unsupported callable" else e2.with_traceback(e2.__traceback__)
+                        raise (
+                            e.with_traceback(e.__traceback__)
+                            if str(e2) == "unsupported callable"
+                            else e2.with_traceback(e2.__traceback__)
+                        )
                 finally:
                     self._next.set()
                     self._next.clear()
+
             self._wrapped_func = _wrapped_set_next
             init_loader_queue: Queue[Tuple[K, "asyncio.Future[V]"]] = Queue()
-            self.__init_loader_coro = exhaust_iterator(self._tasks_for_iterables(*iterables), queue=init_loader_queue)
+            self.__init_loader_coro = exhaust_iterator(
+                self._tasks_for_iterables(*iterables), queue=init_loader_queue
+            )
             with contextlib.suppress(_NoRunningLoop):
                 # its okay if we get this exception, we can start the task as soon as the loop starts
                 self._init_loader
             self._init_loader_next = init_loader_queue.get_all
-
+
     def __repr__(self) -> str:
         return f"<{type(self).__name__} for {self._wrapped_func} kwargs={self._wrapped_func_kwargs} tasks={len(self)} at {hex(id(self))}>"
-
+
     def __hash__(self) -> int:
         return id(self)
-
+
     def __setitem__(self, item: Any, value: Any) -> None:
         raise NotImplementedError("You cannot manually set items in a TaskMapping")
-
+
     def __getitem__(self, item: K) -> "asyncio.Task[V]":
         try:
             return super().__getitem__(item)
@@ -172,17 +225,17 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
                 fut = self._queue.put_nowait(item)
             else:
                 coro = self._wrapped_func(item, **self._wrapped_func_kwargs)
-                name = f"{self._name}[{item}]" if self._name else f"{item}",
+                name = (f"{self._name}[{item}]" if self._name else f"{item}",)
                 fut = create_task(coro=coro, name=name)
             super().__setitem__(item, fut)
             return fut
-
+
     def __await__(self) -> Generator[Any, None, Dict[K, V]]:
         """Wait for all tasks to complete and return a dictionary of the results."""
         return self.gather(sync=False).__await__()
 
     async def __aiter__(self, pop: bool = False) -> AsyncIterator[Tuple[K, V]]:
-        """aiterate thru all key-task pairs, yielding the key-result pair as each task completes"""
+        """Asynchronously iterate through all key-task pairs, yielding the key-result pair as each task completes."""
         self._if_pop_check_destroyed(pop)
 
         # if you inited the TaskMapping with some iterators, we will load those
@@ -195,7 +248,9 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
             while not self._init_loader.done():
                 await self._wait_for_next_key()
             while unyielded := [key for key in self if key not in yielded]:
-                if ready := {key: task for key in unyielded if (task:=self[key]).done()}:
+                if ready := {
+                    key: task for key in unyielded if (task := self[key]).done()
+                }:
                     if pop:
                         for key, task in ready.items():
                             yield key, await self.pop(key)
@@ -231,34 +286,50 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
 
     def values(self, pop: bool = False) -> "TaskMappingValues[K, V]":
         return TaskMappingValues(super().values(), self, pop=pop)
-
+
     def items(self, pop: bool = False) -> "TaskMappingValues[K, V]":
         return TaskMappingItems(super().items(), self, pop=pop)
-
+
     async def close(self) -> None:
         await self._if_pop_clear(True)
 
     @ASyncGeneratorFunction
-    async def map(self, *iterables: AnyIterableOrAwaitableIterable[K], pop: bool = True, yields: Literal['keys', 'both'] = 'both') -> AsyncIterator[Tuple[K, V]]:
+    async def map(
+        self,
+        *iterables: AnyIterableOrAwaitableIterable[K],
+        pop: bool = True,
+        yields: Literal["keys", "both"] = "both",
+    ) -> AsyncIterator[Tuple[K, V]]:
         """
-        Asynchronously map iterables to tasks and yield their results.
+        Asynchronously map iterables to tasks and yield their results.
 
-        Args:
+        Args:
             *iterables: Iterables to map over.
             pop: Whether to remove tasks from the internal storage once they are completed.
             yields: Whether to yield 'keys', 'values', or 'both' (key-value pairs).
-
-        Yields:
+
+        Yields:
             Depending on `yields`, either keys, values,
             or tuples of key-value pairs representing the results of completed tasks.
+
+        Example:
+            async def process_item(item: int) -> int:
+                await asyncio.sleep(1)
+                return item * 2
+
+            task_map = TaskMapping(process_item)
+            async for key, result in task_map.map([1, 2, 3]):
+                print(f"Processed {key}: {result}")
         """
         self._if_pop_check_destroyed(pop)
-
+
         # make sure the init loader is started if needed
         init_loader = self._init_loader
         if iterables and init_loader:
-            raise ValueError("You cannot pass `iterables` to map if the TaskMapping was initialized with an (a)iterable.")
-
+            raise ValueError(
+                "You cannot pass `iterables` to map if the TaskMapping was initialized with an (a)iterable."
+            )
+
         try:
             if iterables:
                 self._raise_if_not_empty()
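The `map()` docstring example above is abbreviated; a runnable sketch of the same call pattern, again with an illustrative `double` coroutine that stands in for real async work, might look like this:

    import asyncio

    from a_sync.task import TaskMapping


    async def double(i: int) -> int:
        await asyncio.sleep(0)
        return i * 2


    async def main() -> None:
        # no iterables at construction time, so they are passed to map() instead
        mapping = TaskMapping(double)
        async for key, result in mapping.map([1, 2, 3]):
            # yields defaults to "both", so key-value pairs come back
            print(key, result)


    asyncio.run(main())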
@@ -269,15 +340,19 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
                 except _EmptySequenceError:
                     if len(iterables) > 1:
                         # TODO gotta handle this situation
-                        raise exceptions.EmptySequenceError("bob needs to code something so you can do this, go tell him") from None
+                        raise exceptions.EmptySequenceError(
+                            "bob needs to code something so you can do this, go tell him"
+                        ) from None
                     # just pass thru
-
+
             elif init_loader:
                 # check for exceptions if you passed an iterable(s) into the class init
                 await init_loader
-
+
             else:
-                self._raise_if_empty("You must either initialize your TaskMapping with an iterable(s) or provide them during your call to map")
+                self._raise_if_empty(
+                    "You must either initialize your TaskMapping with an iterable(s) or provide them during your call to map"
+                )
 
             if self:
                 if pop:
@@ -289,7 +364,7 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
                         yield _yield(key, value, yields)
         finally:
             await self._if_pop_clear(pop)
-
+
     @ASyncMethodDescriptorSyncDefault
     async def all(self, pop: bool = True) -> bool:
         try:
@@ -301,7 +376,7 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
             return True
         finally:
             await self._if_pop_clear(pop)
-
+
     @ASyncMethodDescriptorSyncDefault
     async def any(self, pop: bool = True) -> bool:
         try:
@@ -313,7 +388,7 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
             return False
         finally:
             await self._if_pop_clear(pop)
-
+
     @ASyncMethodDescriptorSyncDefault
     async def max(self, pop: bool = True) -> V:
         max = None
@@ -322,26 +397,36 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
                 if max is None or result > max:
                     max = result
         except _EmptySequenceError:
-            raise exceptions.EmptySequenceError("max() arg is an empty sequence") from None
+            raise exceptions.EmptySequenceError(
+                "max() arg is an empty sequence"
+            ) from None
         if max is None:
-            raise exceptions.EmptySequenceError("max() arg is an empty sequence") from None
+            raise exceptions.EmptySequenceError(
+                "max() arg is an empty sequence"
+            ) from None
         return max
-
+
     @ASyncMethodDescriptorSyncDefault
     async def min(self, pop: bool = True) -> V:
+        """Return the minimum result from the tasks in the mapping."""
         min = None
         try:
             async for key, result in self.__aiter__(pop=pop):
                 if min is None or result < min:
                     min = result
         except _EmptySequenceError:
-            raise exceptions.EmptySequenceError("min() arg is an empty sequence") from None
+            raise exceptions.EmptySequenceError(
+                "min() arg is an empty sequence"
+            ) from None
         if min is None:
-            raise exceptions.EmptySequenceError("min() arg is an empty sequence") from None
+            raise exceptions.EmptySequenceError(
+                "min() arg is an empty sequence"
+            ) from None
         return min
-
+
     @ASyncMethodDescriptorSyncDefault
     async def sum(self, pop: bool = False) -> V:
+        """Return the sum of the results from the tasks in the mapping."""
         retval = 0
         try:
             async for key, result in self.__aiter__(pop=pop):
@@ -360,6 +445,15 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
 
         Yields:
             Tuples of key-value pairs representing the results of completed tasks.
+
+        Example:
+            async def process_item(item: int) -> int:
+                await asyncio.sleep(1)
+                return item * 2
+
+            task_map = TaskMapping(process_item, [1, 2, 3])
+            async for key, result in task_map.yield_completed():
+                print(f"Completed {key}: {result}")
         """
         if pop:
             for k, task in dict(self).items():
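Per the docstring added above, `yield_completed()` only drains tasks that have already finished, which makes it suited to polling loops. A rough sketch under the same assumptions as the earlier examples (the `double` helper is a stand-in, and the sleep intervals are arbitrary):

    import asyncio

    from a_sync.task import TaskMapping


    async def double(i: int) -> int:
        await asyncio.sleep(i / 10)
        return i * 2


    async def main() -> None:
        mapping = TaskMapping(double)
        for key in (1, 2, 3):
            mapping[key]  # __getitem__ starts a task for a missing key
        while mapping:
            # only tasks that are already done are yielded; pop=True removes them
            async for key, result in mapping.yield_completed(pop=True):
                print("done:", key, result)
            await asyncio.sleep(0.05)  # let pending tasks make progress


    asyncio.run(main())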
@@ -369,11 +463,11 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
             for k, task in dict(self).items():
                 if task.done():
                     yield k, await task
-
+
     @ASyncMethodDescriptorSyncDefault
     async def gather(
-        self,
-        return_exceptions: bool = False,
+        self,
+        return_exceptions: bool = False,
         exclude_if: Excluder[V] = None,
         tqdm: bool = False,
         **tqdm_kwargs: Any,
@@ -382,19 +476,53 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
         if self._init_loader:
             await self._init_loader
         self._raise_if_empty()
-        return await gather(self, return_exceptions=return_exceptions, exclude_if=exclude_if, tqdm=tqdm, **tqdm_kwargs)
-
+        return await gather(
+            self,
+            return_exceptions=return_exceptions,
+            exclude_if=exclude_if,
+            tqdm=tqdm,
+            **tqdm_kwargs,
+        )
+
     @overload
-    def pop(self, item: K, cancel: bool = False) -> "Union[asyncio.Task[V], asyncio.Future[V]]":...
+    def pop(
+        self, item: K, *, cancel: bool = False
+    ) -> "Union[asyncio.Task[V], asyncio.Future[V]]":
+        """Pop a task from the TaskMapping.
+
+        Args:
+            item: The key to pop.
+            cancel: Whether to cancel the task when popping it.
+        """
+
     @overload
-    def pop(self, item: K, default: K, cancel: bool = False) -> "Union[asyncio.Task[V], asyncio.Future[V]]":...
-    def pop(self, *args: K, cancel: bool = False) -> "Union[asyncio.Task[V], asyncio.Future[V]]":
+    def pop(
+        self, item: K, default: K, *, cancel: bool = False
+    ) -> "Union[asyncio.Task[V], asyncio.Future[V]]":
+        """Pop a task from the TaskMapping.
+
+        Args:
+            item: The key to pop.
+            default: The default value to return if no matching key is found.
+            cancel: Whether to cancel the task when popping it.
+        """
+
+    def pop(
+        self, *args: K, cancel: bool = False
+    ) -> "Union[asyncio.Task[V], asyncio.Future[V]]":
+        """Pop a task from the TaskMapping.
+
+        Args:
+            *args: One key to pop.
+            cancel: Whether to cancel the task when popping it.
+        """
         fut_or_task = super().pop(*args)
         if cancel:
             fut_or_task.cancel()
         return fut_or_task
-
+
     def clear(self, cancel: bool = False) -> None:
+        """# TODO write docs for this"""
         if cancel and self._init_loader and not self._init_loader.done():
             logger.debug("cancelling %s", self._init_loader)
             # temporary, remove later
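The reworked `pop` overloads keep `cancel` keyword-only, and awaiting the mapping itself gathers every remaining task into a dict of results (see `__await__` above). A hedged sketch of how those pieces compose, with the same illustrative `double` helper:

    import asyncio

    from a_sync.task import TaskMapping


    async def double(i: int) -> int:
        await asyncio.sleep(0)
        return i * 2


    async def main() -> None:
        mapping = TaskMapping(double)
        for key in (1, 2, 3):
            mapping[key]  # create a task per key
        # cancel must be passed by keyword; the popped task leaves the mapping
        mapping.pop(1, cancel=True)
        results = await mapping  # waits for the remaining tasks
        print(results)  # expected: {2: 4, 3: 6}


    asyncio.run(main())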
@@ -412,106 +540,125 @@
     def _init_loader(self) -> Optional["asyncio.Task[None]"]:
         if self.__init_loader_coro:
             logger.debug("starting %s init loader", self)
-            name=f"{type(self).__name__} init loader loading {self.__iterables__} for {self}"
+            name = f"{type(self).__name__} init loader loading {self.__iterables__} for {self}"
             try:
                 task = create_task(coro=self.__init_loader_coro, name=name)
             except RuntimeError as e:
                 raise _NoRunningLoop if str(e) == "no running event loop" else e
             task.add_done_callback(self.__cleanup)
             return task
-
+
     @functools.cached_property
     def _queue(self) -> ProcessingQueue:
         fn = functools.partial(self._wrapped_func, **self._wrapped_func_kwargs)
         return ProcessingQueue(fn, self.concurrency, name=self._name)
-
-    def _raise_if_empty(self, msg: str = '') -> None:
+
+    def _raise_if_empty(self, msg: str = "") -> None:
         if not self:
             raise exceptions.MappingIsEmptyError(self, msg)
-
+
     def _raise_if_not_empty(self) -> None:
         if self:
             raise exceptions.MappingNotEmptyError(self)
 
     @ASyncGeneratorFunction
-    async def _tasks_for_iterables(self, *iterables: AnyIterableOrAwaitableIterable[K]) -> AsyncIterator[Tuple[K, "asyncio.Task[V]"]]:
+    async def _tasks_for_iterables(
+        self, *iterables: AnyIterableOrAwaitableIterable[K]
+    ) -> AsyncIterator[Tuple[K, "asyncio.Task[V]"]]:
         """Ensure tasks are running for each key in the provided iterables."""
         # if we have any regular containers we can yield their contents right away
-        containers = [iterable for iterable in iterables if not isinstance(iterable, AsyncIterable) and isinstance(iterable, Iterable)]
+        containers = [
+            iterable
+            for iterable in iterables
+            if not isinstance(iterable, AsyncIterable)
+            and isinstance(iterable, Iterable)
+        ]
         for iterable in containers:
             async for key in _yield_keys(iterable):
                 yield key, self[key]
-
-        if remaining := [iterable for iterable in iterables if iterable not in containers]:
+
+        if remaining := [
+            iterable for iterable in iterables if iterable not in containers
+        ]:
             try:
-                async for key in as_yielded(*[_yield_keys(iterable) for iterable in remaining]): # type: ignore [attr-defined]
+                async for key in as_yielded(*[_yield_keys(iterable) for iterable in remaining]):  # type: ignore [attr-defined]
                     yield key, self[key] # ensure task is running
             except _EmptySequenceError:
                 if len(iterables) == 1:
                     raise
-                raise RuntimeError("DEV: figure out how to handle this situation") from None
-
+                raise RuntimeError(
+                    "DEV: figure out how to handle this situation"
+                ) from None
+
     def _if_pop_check_destroyed(self, pop: bool) -> None:
         if pop:
             if self._destroyed:
                 raise RuntimeError(f"{self} has already been consumed")
             self._destroyed = True
-
+
     async def _if_pop_clear(self, pop: bool) -> None:
         if pop:
             self._destroyed = True
             # _queue is a cached_property, we don't want to create it if it doesn't exist
-            if self.concurrency and '_queue' in self.__dict__:
+            if self.concurrency and "_queue" in self.__dict__:
                 self._queue.close()
                 del self._queue
             self.clear(cancel=True)
             # we need to let the loop run once so the tasks can fully cancel
             await asyncio.sleep(0)
-
+
     async def _wait_for_next_key(self) -> None:
         # NOTE if `_init_loader` has an exception it will return first, otherwise `_init_loader_next` will return always
         done, pending = await asyncio.wait(
-            [create_task(self._init_loader_next(), log_destroy_pending=False), self._init_loader],
-            return_when=asyncio.FIRST_COMPLETED
+            [
+                create_task(self._init_loader_next(), log_destroy_pending=False),
+                self._init_loader,
+            ],
+            return_when=asyncio.FIRST_COMPLETED,
         )
         for task in done:
             # check for exceptions
             await task
-
+
     def __cleanup(self, t: "asyncio.Task[None]") -> None:
         # clear the slot and let the bound Queue die
         del self.__init_loader_coro
 
 
-class _NoRunningLoop(Exception):
-    ...
+class _NoRunningLoop(Exception): ...
+
 
 @overload
-def _yield(key: K, value: V, yields: Literal['keys']) -> K:...
+def _yield(
+    key: K, value: V, yields: Literal["keys"]
+) -> K: ...  # TODO write specific docs for this overload
 @overload
-def _yield(key: K, value: V, yields: Literal['both']) -> Tuple[K, V]:...
-def _yield(key: K, value: V, yields: Literal['keys', 'both']) -> Union[K, Tuple[K, V]]:
+def _yield(
+    key: K, value: V, yields: Literal["both"]
+) -> Tuple[K, V]: ...  # TODO write specific docs for this overload
+def _yield(key: K, value: V, yields: Literal["keys", "both"]) -> Union[K, Tuple[K, V]]:
     """
     Yield either the key, value, or both based on the 'yields' parameter.
-
+
     Args:
         key: The key of the task.
         value: The result of the task.
         yields: Determines what to yield; 'keys' for keys, 'both' for key-value pairs.
-
+
     Returns:
         The key, the value, or a tuple of both based on the 'yields' parameter.
     """
-    if yields == 'both':
+    if yields == "both":
        return key, value
-    elif yields == 'keys':
+    elif yields == "keys":
        return key
    else:
        raise ValueError(f"`yields` must be 'keys' or 'both'. You passed {yields}")
 
-class _EmptySequenceError(ValueError):
-    ...
-
+
+class _EmptySequenceError(ValueError): ...
+
+
 async def _yield_keys(iterable: AnyIterableOrAwaitableIterable[K]) -> AsyncIterator[K]:
     """
     Asynchronously yield keys from the provided iterable.
@@ -536,9 +683,15 @@ async def _yield_keys(iterable: AnyIterableOrAwaitableIterable[K]) -> AsyncIterator[K]:
     else:
         raise TypeError(iterable)
 
+
 __unwrapped = weakref.WeakKeyDictionary()
 
-def _unwrap(wrapped_func: Union[AnyFn[P, T], "ASyncMethodDescriptor[P, T]", _ASyncPropertyDescriptorBase[I, T]]) -> Callable[P, Awaitable[T]]:
+
+def _unwrap(
+    wrapped_func: Union[
+        AnyFn[P, T], "ASyncMethodDescriptor[P, T]", _ASyncPropertyDescriptorBase[I, T]
+    ]
+) -> Callable[P, Awaitable[T]]:
     if unwrapped := __unwrapped.get(wrapped_func):
         return unwrapped
     if isinstance(wrapped_func, (ASyncBoundMethod, ASyncMethodDescriptor)):
@@ -548,7 +701,11 @@ def _unwrap(wrapped_func: Union[AnyFn[P, T], "ASyncMethodDescriptor[P, T]", _ASyncPropertyDescriptorBase[I, T]]) -> Callable[P, Awaitable[T]]:
     elif isinstance(wrapped_func, ASyncFunction):
         # this speeds things up a bit by bypassing some logic
         # TODO implement it like this elsewhere if profilers suggest
-        unwrapped = wrapped_func._modified_fn if wrapped_func._async_def else wrapped_func._asyncified
+        unwrapped = (
+            wrapped_func._modified_fn
+            if wrapped_func._async_def
+            else wrapped_func._asyncified
+        )
     else:
         unwrapped = wrapped_func
     __unwrapped[wrapped_func] = unwrapped
@@ -558,34 +715,58 @@ def _unwrap(wrapped_func: Union[AnyFn[P, T], "ASyncMethodDescriptor[P, T]", _ASyncPropertyDescriptorBase[I, T]]) -> Callable[P, Awaitable[T]]:
 _get_key: Callable[[Tuple[K, V]], K] = lambda k_and_v: k_and_v[0]
 _get_value: Callable[[Tuple[K, V]], V] = lambda k_and_v: k_and_v[1]
 
+
 class _TaskMappingView(ASyncGenericBase, Iterable[T], Generic[T, K, V]):
+    """
+    Base class for TaskMapping views that provides common functionality.
+    """
+
     _get_from_item: Callable[[Tuple[K, V]], T]
     _pop: bool = False
-    def __init__(self, view: Iterable[T], task_mapping: TaskMapping[K, V], pop: bool = False) -> None:
+
+    def __init__(
+        self, view: Iterable[T], task_mapping: TaskMapping[K, V], pop: bool = False
+    ) -> None:
         self.__view__ = view
         self.__mapping__: TaskMapping = weakref.proxy(task_mapping)
         "actually a weakref.ProxyType[TaskMapping] but then type hints weren't working"
         if pop:
             self._pop = True
+
     def __iter__(self) -> Iterator[T]:
         return iter(self.__view__)
+
     def __await__(self) -> Generator[Any, None, List[T]]:
         return self._await().__await__()
+
     def __len__(self) -> int:
         return len(self.__view__)
+
     async def _await(self) -> List[T]:
         return [result async for result in self]
+
     __slots__ = "__view__", "__mapping__"
+
     async def aiterbykeys(self, reverse: bool = False) -> ASyncIterator[T]:
-        async for tup in ASyncSorter(self.__mapping__.items(pop=self._pop), key=_get_key, reverse=reverse):
+        async for tup in ASyncSorter(
+            self.__mapping__.items(pop=self._pop), key=_get_key, reverse=reverse
+        ):
             yield self._get_from_item(tup)
+
     async def aiterbyvalues(self, reverse: bool = False) -> ASyncIterator[T]:
-        async for tup in ASyncSorter(self.__mapping__.items(pop=self._pop), key=_get_value, reverse=reverse):
+        async for tup in ASyncSorter(
+            self.__mapping__.items(pop=self._pop), key=_get_value, reverse=reverse
+        ):
             yield self._get_from_item(tup)
 
 
 class TaskMappingKeys(_TaskMappingView[K, K, V], Generic[K, V]):
+    """
+    Asynchronous view to iterate over the keys of a TaskMapping.
+    """
+
     _get_from_item = lambda self, item: _get_key(item)
+
     async def __aiter__(self) -> AsyncIterator[K]:
         # strongref
         mapping = self.__mapping__
@@ -610,6 +791,7 @@ class TaskMappingKeys(_TaskMappingView[K, K, V], Generic[K, V]):
             for key in self.__load_existing():
                 if key not in yielded:
                     yield key
+
     def __load_existing(self) -> Iterator[K]:
         # strongref
         mapping = self.__mapping__
@@ -620,6 +802,7 @@ class TaskMappingKeys(_TaskMappingView[K, K, V], Generic[K, V]):
         else:
             for key in tuple(mapping):
                 yield key
+
     async def __load_init_loader(self, yielded: Set[K]) -> AsyncIterator[K]:
         # strongref
         mapping = self.__mapping__
@@ -637,8 +820,14 @@ class TaskMappingKeys(_TaskMappingView[K, K, V], Generic[K, V]):
             # check for any exceptions
             await mapping._init_loader
 
+
 class TaskMappingItems(_TaskMappingView[Tuple[K, V], K, V], Generic[K, V]):
+    """
+    Asynchronous view to iterate over the items (key-value pairs) of a TaskMapping.
+    """
+
     _get_from_item = lambda self, item: item
+
     async def __aiter__(self) -> AsyncIterator[Tuple[K, V]]:
         # strongref
         mapping = self.__mapping__
@@ -649,9 +838,15 @@ class TaskMappingItems(_TaskMappingView[Tuple[K, V], K, V], Generic[K, V]):
         else:
             async for key in mapping.keys():
                 yield key, await mapping[key]
-
+
+
 class TaskMappingValues(_TaskMappingView[V, K, V], Generic[K, V]):
+    """
+    Asynchronous view to iterate over the values of a TaskMapping.
+    """
+
     _get_from_item = lambda self, item: _get_value(item)
+
     async def __aiter__(self) -> AsyncIterator[V]:
         # strongref
         mapping = self.__mapping__
@@ -664,4 +859,10 @@ class TaskMappingValues(_TaskMappingView[V, K, V], Generic[K, V]):
             yield await mapping[key]
 
 
-__all__ = ["create_task", "TaskMapping", "TaskMappingKeys", "TaskMappingValues", "TaskMappingItems"]
+__all__ = [
+    "create_task",
+    "TaskMapping",
+    "TaskMappingKeys",
+    "TaskMappingValues",
+    "TaskMappingItems",
+]
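The three view classes documented in this diff behave like asynchronous counterparts of dict views: they can be iterated with `async for` and can also be awaited to collect everything into a list. A brief sketch under the same assumptions as the earlier examples (illustrative `double` coroutine; the ordering shown in the comments is not guaranteed):

    import asyncio

    from a_sync.task import TaskMapping


    async def double(i: int) -> int:
        await asyncio.sleep(0)
        return i * 2


    async def main() -> None:
        mapping = TaskMapping(double, [1, 2, 3])
        keys = await mapping.keys()      # e.g. [1, 2, 3]
        values = await mapping.values()  # e.g. [2, 4, 6]
        async for key, value in mapping.items():
            print(key, value)


    asyncio.run(main())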