ez-a-sync 0.22.15__py3-none-any.whl → 0.22.16__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ez-a-sync might be problematic.

Files changed (49)
  1. a_sync/ENVIRONMENT_VARIABLES.py +34 -3
  2. a_sync/__init__.py +32 -9
  3. a_sync/_smart.py +105 -6
  4. a_sync/_typing.py +56 -3
  5. a_sync/a_sync/_descriptor.py +174 -12
  6. a_sync/a_sync/_flags.py +64 -3
  7. a_sync/a_sync/_helpers.py +40 -8
  8. a_sync/a_sync/_kwargs.py +30 -6
  9. a_sync/a_sync/_meta.py +35 -6
  10. a_sync/a_sync/abstract.py +57 -9
  11. a_sync/a_sync/config.py +44 -7
  12. a_sync/a_sync/decorator.py +217 -37
  13. a_sync/a_sync/function.py +339 -47
  14. a_sync/a_sync/method.py +241 -52
  15. a_sync/a_sync/modifiers/__init__.py +39 -1
  16. a_sync/a_sync/modifiers/cache/__init__.py +75 -5
  17. a_sync/a_sync/modifiers/cache/memory.py +50 -6
  18. a_sync/a_sync/modifiers/limiter.py +55 -6
  19. a_sync/a_sync/modifiers/manager.py +46 -2
  20. a_sync/a_sync/modifiers/semaphores.py +84 -11
  21. a_sync/a_sync/singleton.py +43 -19
  22. a_sync/asyncio/__init__.py +137 -1
  23. a_sync/asyncio/as_completed.py +44 -38
  24. a_sync/asyncio/create_task.py +46 -10
  25. a_sync/asyncio/gather.py +72 -25
  26. a_sync/exceptions.py +178 -11
  27. a_sync/executor.py +51 -3
  28. a_sync/future.py +671 -29
  29. a_sync/iter.py +64 -7
  30. a_sync/primitives/_debug.py +59 -5
  31. a_sync/primitives/_loggable.py +36 -6
  32. a_sync/primitives/locks/counter.py +74 -7
  33. a_sync/primitives/locks/prio_semaphore.py +87 -8
  34. a_sync/primitives/locks/semaphore.py +68 -20
  35. a_sync/primitives/queue.py +65 -26
  36. a_sync/task.py +51 -15
  37. a_sync/utils/iterators.py +52 -16
  38. {ez_a_sync-0.22.15.dist-info → ez_a_sync-0.22.16.dist-info}/METADATA +1 -1
  39. ez_a_sync-0.22.16.dist-info/RECORD +74 -0
  40. {ez_a_sync-0.22.15.dist-info → ez_a_sync-0.22.16.dist-info}/WHEEL +1 -1
  41. tests/executor.py +150 -12
  42. tests/test_abstract.py +15 -0
  43. tests/test_base.py +198 -2
  44. tests/test_executor.py +23 -0
  45. tests/test_singleton.py +13 -1
  46. tests/test_task.py +45 -17
  47. ez_a_sync-0.22.15.dist-info/RECORD +0 -74
  48. {ez_a_sync-0.22.15.dist-info → ez_a_sync-0.22.16.dist-info}/LICENSE.txt +0 -0
  49. {ez_a_sync-0.22.15.dist-info → ez_a_sync-0.22.16.dist-info}/top_level.txt +0 -0
a_sync/primitives/locks/semaphore.py CHANGED
@@ -18,31 +18,34 @@ logger = logging.getLogger(__name__)
 
 class Semaphore(asyncio.Semaphore, _DebugDaemonMixin):
     """
-    A semaphore with additional debugging capabilities.
+    A semaphore with additional debugging capabilities inherited from :class:`_DebugDaemonMixin`.
 
-    This semaphore includes debug logging and can be used to decorate coroutine functions.
+    This semaphore includes debug logging capabilities that are activated when the semaphore has waiters.
     It allows rewriting the pattern of acquiring a semaphore within a coroutine using a decorator.
 
-    So you can write this pattern:
+    Example:
+        You can write this pattern:
 
-    ```
-    semaphore = Semaphore(5)
+        ```
+        semaphore = Semaphore(5)
 
-    async def limited():
-        async with semaphore:
-            return 1
+        async def limited():
+            async with semaphore:
+                return 1
+        ```
 
-    ```
+        like this:
 
-    like this:
+        ```
+        semaphore = Semaphore(5)
 
-    ```
-    semaphore = Semaphore(5)
+        @semaphore
+        async def limited():
+            return 1
+        ```
 
-    @semaphore
-    async def limited():
-        return 1
-    ```
+    See Also:
+        :class:`_DebugDaemonMixin` for more details on debugging capabilities.
     """
 
     if sys.version_info >= (3, 10):
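To make the decorator usage documented above concrete, here is a minimal sketch of how a semaphore can double as a decorator. `DecoratorSemaphore` and its wrapper are illustrative names only, not the library's implementation, although the next hunk in this file does show the library returning a `semaphore_wrapper` closure.

```
import asyncio
import functools

class DecoratorSemaphore(asyncio.Semaphore):
    """Illustrative only: a semaphore that can also wrap coroutine functions."""

    def __call__(self, fn):
        # Wrap the coroutine function so each call runs inside `async with self`.
        @functools.wraps(fn)
        async def semaphore_wrapper(*args, **kwargs):
            async with self:
                return await fn(*args, **kwargs)
        return semaphore_wrapper

semaphore = DecoratorSemaphore(5)

@semaphore
async def limited():
    return 1

asyncio.run(limited())
```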
@@ -109,6 +112,14 @@ class Semaphore(asyncio.Semaphore, _DebugDaemonMixin):
         return semaphore_wrapper
 
     async def acquire(self) -> Literal[True]:
+        """
+        Acquire the semaphore, ensuring that debug logging is enabled if there are waiters.
+
+        If the semaphore value is zero or less, the debug daemon is started to log the state of the semaphore.
+
+        Returns:
+            True when the semaphore is successfully acquired.
+        """
         if self._value <= 0:
             self._ensure_debug_daemon()
         return await super().acquire()
@@ -116,6 +127,15 @@ class Semaphore(asyncio.Semaphore, _DebugDaemonMixin):
     async def _debug_daemon(self) -> None:
         """
         Daemon coroutine (runs in a background task) which will emit a debug log every minute while the semaphore has waiters.
+
+        This method is part of the :class:`_DebugDaemonMixin` and is used to provide detailed logging information
+        about the semaphore's state when it is being waited on.
+
+        Example:
+            semaphore = Semaphore(5)
+
+            async def monitor():
+                await semaphore._debug_daemon()
         """
         while self._waiters:
             await asyncio.sleep(60)
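A self-contained sketch of the behaviour these two hunks document: start a background task when a caller has to wait, and log periodically while waiters remain. `LoggingSemaphore` is hypothetical; the real library wraps this pattern in its `_DebugDaemonMixin`, and `_waiters` is an internal attribute of `asyncio.Semaphore` that the library itself relies on in the context lines above.

```
import asyncio
import logging

logger = logging.getLogger(__name__)

class LoggingSemaphore(asyncio.Semaphore):
    """Illustrative only: approximates the debug-daemon behaviour described above."""

    _daemon: "asyncio.Task | None" = None

    async def acquire(self) -> bool:
        # Start the debug daemon the first time a caller has to wait.
        if self._value <= 0 and (self._daemon is None or self._daemon.done()):
            self._daemon = asyncio.create_task(self._debug_daemon())
        return await super().acquire()

    async def _debug_daemon(self) -> None:
        # Emit a debug log every minute while the semaphore has waiters.
        while self._waiters:
            await asyncio.sleep(60)
            logger.debug("%s is blocking %s waiters", self, len(self._waiters))
```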
@@ -129,6 +149,13 @@ class DummySemaphore(asyncio.Semaphore):
     A dummy semaphore that implements the standard :class:`asyncio.Semaphore` API but does nothing.
 
     This class is useful for scenarios where a semaphore interface is required but no actual synchronization is needed.
+
+    Example:
+        dummy_semaphore = DummySemaphore()
+
+        async def no_op():
+            async with dummy_semaphore:
+                return 1
     """
 
     __slots__ = "name", "_value"
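A quick sketch of the use case the docstring describes: a function that only relies on the async context-manager interface, so the caller can swap a real limit for a no-op one. The construction calls mirror the docstring examples in this diff; the import path is an assumption based on this wheel's RECORD.

```
import asyncio
from a_sync.primitives.locks.semaphore import DummySemaphore, Semaphore  # assumed path

async def fetch(url: str, sem) -> str:
    # Only the async context-manager interface is required here.
    async with sem:
        await asyncio.sleep(0)  # stand-in for real I/O
        return f"fetched {url}"

async def main() -> None:
    throttled = Semaphore(5)        # at most 5 concurrent calls
    unthrottled = DummySemaphore()  # same interface, no synchronization
    print(await fetch("http://example.com", throttled))
    print(await fetch("http://example.com", unthrottled))

asyncio.run(main())
```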
@@ -149,11 +176,14 @@ class DummySemaphore(asyncio.Semaphore):
     async def acquire(self) -> Literal[True]:
         return True
 
-    def release(self) -> None: ...
+    def release(self) -> None:
+        """No-op release method."""
 
-    async def __aenter__(self): ...
+    async def __aenter__(self):
+        """No-op context manager entry."""
 
-    async def __aexit__(self, *args): ...
+    async def __aexit__(self, *args):
+        """No-op context manager exit."""
 
 
 class ThreadsafeSemaphore(Semaphore):
@@ -161,7 +191,18 @@ class ThreadsafeSemaphore(Semaphore):
     A semaphore that works in a multi-threaded environment.
 
     This semaphore ensures that the program functions correctly even when used with multiple event loops.
-    It provides a workaround for edge cases involving multiple threads and event loops.
+    It provides a workaround for edge cases involving multiple threads and event loops by using a separate semaphore
+    for each thread.
+
+    Example:
+        semaphore = ThreadsafeSemaphore(5)
+
+        async def limited():
+            async with semaphore:
+                return 1
+
+    See Also:
+        :class:`Semaphore` for the base class implementation.
     """
 
     __slots__ = "semaphores", "dummy"
@@ -198,6 +239,13 @@ class ThreadsafeSemaphore(Semaphore):
         Returns the appropriate semaphore for the current thread.
 
         NOTE: We can't cache this property because we need to check the current thread every time we access it.
+
+        Example:
+            semaphore = ThreadsafeSemaphore(5)
+
+            async def limited():
+                async with semaphore.semaphore:
+                    return 1
         """
         return self.dummy if self.use_dummy else self.semaphores[current_thread()]
 
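The per-thread design documented above (one semaphore per thread, selected through `self.semaphores[current_thread()]`) can be sketched with plain asyncio. `PerThreadSemaphore` is a simplified illustration, not the library's class; the real `ThreadsafeSemaphore` also supports a dummy mode when no limit is set.

```
import asyncio
from collections import defaultdict
from threading import current_thread

class PerThreadSemaphore:
    """Illustrative only: hands each thread (and therefore each event loop) its own semaphore."""

    def __init__(self, value: int) -> None:
        # One asyncio.Semaphore per thread, created lazily on first use in that thread.
        self.semaphores = defaultdict(lambda: asyncio.Semaphore(value))

    @property
    def semaphore(self) -> asyncio.Semaphore:
        # Cannot be cached: the right semaphore depends on which thread is asking.
        return self.semaphores[current_thread()]

    async def __aenter__(self) -> None:
        await self.semaphore.acquire()

    async def __aexit__(self, *args) -> None:
        self.semaphore.release()
```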
a_sync/primitives/queue.py CHANGED
@@ -1,8 +1,16 @@
 """
-This module provides various queue implementations for managing asynchronous tasks,
-including standard FIFO queues, priority queues, and processing queues.
-# TODO specify a list of specific objects with a brief description + use case example instead of being vague like this
-
+This module provides various queue implementations for managing asynchronous tasks.
+It includes standard FIFO queues, priority queues, and processing queues with enhanced functionality.
+
+Classes:
+    Queue: A generic asynchronous queue that extends the functionality of `asyncio.Queue`.
+    ProcessingQueue: A queue designed for processing tasks asynchronously with multiple workers.
+    PriorityProcessingQueue: A priority-based processing queue where tasks are processed based on priority.
+    SmartProcessingQueue: A processing queue that executes jobs with the most waiters first, supporting dynamic priorities.
+
+See Also:
+    `asyncio.Queue`: The base class for asynchronous FIFO queues.
+    `asyncio.PriorityQueue`: The base class for priority queues.
 """
 
 import asyncio
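The processing queues listed in this new module docstring wrap a common pattern: callers enqueue work and get back a future for the result while a pool of workers drains the queue. A minimal plain-asyncio sketch of that pattern (not the library's implementation) looks like this:

```
import asyncio

async def worker(queue: asyncio.Queue) -> None:
    # Each worker pulls (coroutine function, argument, result future) tuples
    # off the queue and fulfils the future with the call's result.
    while True:
        func, arg, fut = await queue.get()
        try:
            fut.set_result(await func(arg))
        except Exception as exc:
            fut.set_exception(exc)
        finally:
            queue.task_done()

async def main() -> None:
    async def process_task(data: str) -> str:
        return data.upper()

    queue: asyncio.Queue = asyncio.Queue()
    workers = [asyncio.create_task(worker(queue)) for _ in range(5)]

    fut = asyncio.get_running_loop().create_future()
    queue.put_nowait((process_task, "task", fut))
    print(await fut)  # -> TASK

    for w in workers:
        w.cancel()

asyncio.run(main())
```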
@@ -38,7 +46,7 @@ else:
 
 class Queue(_Queue[T]):
     """
-    A generic asynchronous queue that extends the functionality of asyncio.Queue.
+    A generic asynchronous queue that extends the functionality of `asyncio.Queue`.
 
     This implementation supports retrieving multiple items at once and handling
     task processing in both FIFO and LIFO order. It provides enhanced type hinting
@@ -66,9 +74,6 @@ class Queue(_Queue[T]):
 
         If the queue is empty, this method will block until an item is available.
 
-        Returns:
-            T: The next item in the queue.
-
         Example:
             >>> result = await queue.get()
             >>> print(result)
@@ -85,9 +90,6 @@ class Queue(_Queue[T]):
         Raises:
             :exc:`~asyncio.QueueEmpty`: If the queue is empty.
 
-        Returns:
-            T: The next item in the queue.
-
         Example:
             >>> result = queue.get_nowait()
             >>> print(result)
@@ -133,9 +135,6 @@ class Queue(_Queue[T]):
         If the queue is empty, this method will wait until at least one item
         is available before returning.
 
-        Returns:
-            List[T]: A list of all items that were in the queue.
-
         Example:
             >>> tasks = await queue.get_all()
             >>> print(tasks)
@@ -155,9 +154,6 @@ class Queue(_Queue[T]):
         Raises:
             :exc:`~asyncio.QueueEmpty`: If the queue is empty.
 
-        Returns:
-            List[T]: A list of all items that were in the queue.
-
         Example:
             >>> tasks = queue.get_all_nowait()
             >>> print(tasks)
@@ -179,9 +175,6 @@ class Queue(_Queue[T]):
             i: The number of items to retrieve.
             can_return_less: If True, may return fewer than `i` items if queue is emptied.
 
-        Returns:
-            List[T]: A list containing the retrieved items.
-
         Raises:
             :exc:`~asyncio.QueueEmpty`: If no items are available and fewer items cannot be returned.
 
@@ -211,9 +204,6 @@ class Queue(_Queue[T]):
         Raises:
             :exc:`~asyncio.QueueEmpty`: If no items are available and fewer items cannot be returned.
 
-        Returns:
-            List[T]: A list containing the retrieved items.
-
         Example:
             >>> tasks = queue.get_multi_nowait(i=3, can_return_less=True)
             >>> print(tasks)
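Building on the `get_multi_nowait` docstring just above, here is a hedged usage sketch of the `can_return_less` behaviour. The method name and keywords come from the docstrings in this diff; the import path is an assumption based on this wheel's RECORD, and the printed lists assume plain FIFO ordering.

```
import asyncio
from a_sync.primitives.queue import Queue  # assumed path

async def main() -> None:
    queue = Queue()
    for n in range(4):
        queue.put_nowait(n)

    # Take exactly three items...
    print(queue.get_multi_nowait(i=3, can_return_less=False))  # expected [0, 1, 2]
    # ...then drain whatever is left, even though fewer than three items remain.
    print(queue.get_multi_nowait(i=3, can_return_less=True))   # expected [3]

asyncio.run(main())
```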
@@ -548,6 +538,9 @@ def _validate_args(i: int, can_return_less: bool) -> None:
 class _SmartFutureRef(weakref.ref, Generic[T]):
     """
     Weak reference for :class:`~_smart.SmartFuture` objects used in priority queues.
+
+    See Also:
+        :class:`~_smart.SmartFuture`
     """
 
     def __lt__(self, other: "_SmartFutureRef[T]") -> bool:
@@ -578,6 +571,9 @@ class _SmartFutureRef(weakref.ref, Generic[T]):
 class _PriorityQueueMixin(Generic[T]):
     """
     Mixin for creating priority queue functionality with support for custom comparison.
+
+    See Also:
+        :class:`~asyncio.PriorityQueue`
     """
 
     def _init(self, maxsize):
@@ -612,7 +608,20 @@ class _PriorityQueueMixin(Generic[T]):
 class PriorityProcessingQueue(_PriorityQueueMixin[T], ProcessingQueue[T, V]):
     """
     A priority-based processing queue where tasks are processed based on priority.
-    # NOTE: WIP
+
+    This queue allows tasks to be added with a specified priority, ensuring that
+    higher priority tasks are processed before lower priority ones. It is ideal
+    for scenarios where task prioritization is crucial.
+
+    Example:
+        >>> async def process_task(data): return data.upper()
+        >>> queue = PriorityProcessingQueue(func=process_task, num_workers=5)
+        >>> fut = await queue.put(priority=1, item='task')
+        >>> print(await fut)
+        TASK
+
+    See Also:
+        :class:`~ProcessingQueue`
     """
 
     async def put(
@@ -679,6 +688,9 @@ class PriorityProcessingQueue(_PriorityQueueMixin[T], ProcessingQueue[T, V]):
 class _VariablePriorityQueueMixin(_PriorityQueueMixin[T]):
     """
     Mixin for priority queues where task priorities can be updated dynamically.
+
+    See Also:
+        :class:`~_PriorityQueueMixin`
     """
 
     def _get(self, heapify=heapq.heapify, heappop=heapq.heappop):
@@ -721,7 +733,20 @@ class _VariablePriorityQueueMixin(_PriorityQueueMixin[T]):
 class VariablePriorityQueue(_VariablePriorityQueueMixin[T], asyncio.PriorityQueue):
     """
     A :class:`~asyncio.PriorityQueue` subclass that allows priorities to be updated (or computed) on the fly.
-    # NOTE: WIP
+
+    This queue supports dynamic priority updates, making it suitable for tasks
+    where priorities may change over time. It ensures that tasks are processed
+    based on the most current priority.
+
+    Example:
+        >>> queue = VariablePriorityQueue()
+        >>> queue.put_nowait((1, 'task1'))
+        >>> queue.put_nowait((2, 'task2'))
+        >>> task = queue.get_nowait()
+        >>> print(task)
+
+    See Also:
+        :class:`~asyncio.PriorityQueue`
     """
 
 
@@ -729,7 +754,21 @@ class SmartProcessingQueue(
     _VariablePriorityQueueMixin[T], ProcessingQueue[Concatenate[T, P], V]
 ):
     """
-    A PriorityProcessingQueue subclass that will execute jobs with the most waiters first
+    A processing queue that will execute jobs with the most waiters first, supporting dynamic priorities.
+
+    This queue is designed to handle tasks with dynamic priorities, ensuring that
+    tasks with the most waiters are prioritized. It is ideal for scenarios where
+    task execution order is influenced by the number of waiters.
+
+    Example:
+        >>> async def process_task(data): return data.upper()
+        >>> queue = SmartProcessingQueue(func=process_task, num_workers=5)
+        >>> fut = await queue.put(item='task')
+        >>> print(await fut)
+        TASK
+
+    See Also:
+        :class:`~ProcessingQueue`
     """
 
     _no_futs = False
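The "most waiters first" and "priorities computed on the fly" ideas documented in these hunks can be illustrated with plain `heapq`: refresh each job's sort key from its current waiter count and re-heapify before popping. `Job` and `pop_most_wanted` are hypothetical names for this sketch; the real queue tracks waiters through weak references to `SmartFuture` objects.

```
import heapq
from dataclasses import dataclass, field

@dataclass(order=True)
class Job:
    # heapq is a min-heap, so more waiters => smaller sort key.
    sort_key: int = field(init=False)
    name: str = field(compare=False)
    waiters: int = field(compare=False, default=0)

    def __post_init__(self) -> None:
        self.sort_key = -self.waiters

def pop_most_wanted(jobs: list) -> Job:
    # Waiter counts may have changed since the jobs were pushed, so refresh
    # the keys and re-heapify before popping.
    for job in jobs:
        job.sort_key = -job.waiters
    heapq.heapify(jobs)
    return heapq.heappop(jobs)

jobs = [Job("a", waiters=1), Job("b", waiters=3), Job("c", waiters=2)]
jobs[0].waiters = 5                 # priority changes after enqueueing
print(pop_most_wanted(jobs).name)   # -> "a"
```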
a_sync/task.py CHANGED
@@ -45,21 +45,30 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
     """
     A mapping of keys to asynchronous tasks with additional functionality.
 
-    TaskMapping is a specialized dictionary that maps keys to asyncio Tasks. It provides
+    `TaskMapping` is a specialized dictionary that maps keys to `asyncio` Tasks. It provides
     convenient methods for creating, managing, and iterating over these tasks asynchronously.
 
-    Example:
-        async def fetch_data(url: str) -> str:
-            async with aiohttp.ClientSession() as session:
-                async with session.get(url) as response:
-                    return await response.text()
-
-        tasks = TaskMapping(fetch_data, name='url_fetcher', concurrency=5)
-        tasks['example.com'] = 'http://example.com'
-        tasks['python.org'] = 'https://www.python.org'
+    Tasks are created automatically for each key using a provided function. You cannot manually set items in a `TaskMapping` using dictionary-like syntax.
 
-        async for key, result in tasks:
-            print(f"Data for {key}: {result}")
+    Example:
+        >>> async def fetch_data(url: str) -> str:
+        ...     async with aiohttp.ClientSession() as session:
+        ...         async with session.get(url) as response:
+        ...             return await response.text()
+        ...
+        >>> tasks = TaskMapping(fetch_data, ['http://example.com', 'https://www.python.org'], name='url_fetcher', concurrency=5)
+        >>> async for key, result in tasks:
+        ...     print(f"Data for {key}: {result}")
+        ...
+        Data for python.org: http://python.org
+        Data for example.com: http://example.com
+
+    Note:
+        You cannot manually set items in a `TaskMapping` using dictionary-like syntax. Tasks are created and managed internally.
+
+    See Also:
+        - :class:`asyncio.Task`
+        - :func:`asyncio.create_task`
     """
 
     concurrency: Optional[int] = None
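The core idea in the new docstring, "tasks are created automatically for each key using a provided function", can be sketched with a plain dict subclass. `LazyTaskDict` is a hypothetical illustration only; the real `TaskMapping` additionally handles concurrency limits, task naming, and async iteration over results.

```
import asyncio
from typing import Any, Callable, Coroutine

class LazyTaskDict(dict):
    """Illustrative only: creates an asyncio.Task for each key on first access."""

    def __init__(self, wrapped_func: Callable[[Any], Coroutine]) -> None:
        super().__init__()
        self._wrapped_func = wrapped_func

    def __missing__(self, key):
        # First lookup of a key schedules wrapped_func(key) and caches the Task.
        task = asyncio.create_task(self._wrapped_func(key))
        self[key] = task
        return task

async def double(x: int) -> int:
    await asyncio.sleep(0)
    return x * 2

async def main() -> None:
    tasks = LazyTaskDict(double)
    print(await tasks[21])  # -> 42

asyncio.run(main())
```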
@@ -111,6 +120,13 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
             name: An optional name for the tasks created by this mapping.
             concurrency: Maximum number of tasks to run concurrently.
             **wrapped_func_kwargs: Additional keyword arguments to be passed to wrapped_func.
+
+        Example:
+            async def process_item(item: int) -> int:
+                await asyncio.sleep(1)
+                return item * 2
+
+            task_map = TaskMapping(process_item, [1, 2, 3], concurrency=2)
         """
 
         if concurrency:
@@ -126,9 +142,11 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
         self._wrapped_func = wrapped_func
         "The function used to create tasks for each key."
 
-        if isinstance(wrapped_func, ASyncMethodDescriptor):
-            if _kwargs.get_flag_name(wrapped_func_kwargs) is None:
-                wrapped_func_kwargs["sync"] = False
+        if (
+            isinstance(wrapped_func, ASyncMethodDescriptor)
+            and _kwargs.get_flag_name(wrapped_func_kwargs) is None
+        ):
+            wrapped_func_kwargs["sync"] = False
         if wrapped_func_kwargs:
             self._wrapped_func_kwargs = wrapped_func_kwargs
 
@@ -293,6 +311,15 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
         Yields:
             Depending on `yields`, either keys, values,
             or tuples of key-value pairs representing the results of completed tasks.
+
+        Example:
+            async def process_item(item: int) -> int:
+                await asyncio.sleep(1)
+                return item * 2
+
+            task_map = TaskMapping(process_item)
+            async for key, result in task_map.map([1, 2, 3]):
+                print(f"Processed {key}: {result}")
         """
         self._if_pop_check_destroyed(pop)
 
@@ -418,6 +445,15 @@ class TaskMapping(DefaultDict[K, "asyncio.Task[V]"], AsyncIterable[Tuple[K, V]])
 
         Yields:
             Tuples of key-value pairs representing the results of completed tasks.
+
+        Example:
+            async def process_item(item: int) -> int:
+                await asyncio.sleep(1)
+                return item * 2
+
+            task_map = TaskMapping(process_item, [1, 2, 3])
+            async for key, result in task_map.yield_completed():
+                print(f"Completed {key}: {result}")
         """
         if pop:
             for k, task in dict(self).items():
a_sync/utils/iterators.py CHANGED
@@ -22,11 +22,22 @@ async def exhaust_iterator(
     """
     Asynchronously iterates over items from the given async iterator and optionally places them into a queue.
 
-    This function is a utility to exhaust an async iterator, with an option to forward the iterated items to a provided asyncio.Queue. It's particularly useful when dealing with asynchronous operations that produce items to be consumed by other parts of an application, enabling a producer-consumer pattern.
+    This function is a utility to exhaust an async iterator, with an option to forward the iterated items to a provided queue-like object.
+    The queue should have a `put_nowait` method. This is particularly useful when dealing with asynchronous operations that produce items
+    to be consumed by other parts of an application, enabling a producer-consumer pattern.
 
     Args:
         iterator (AsyncIterator[T]): The async iterator to exhaust.
-        queue (Optional[asyncio.Queue]): An optional queue where iterated items will be placed. If None, items are simply consumed.
+        queue (Optional[asyncio.Queue]): An optional queue-like object where iterated items will be placed.
+            The queue should support the `put_nowait` method. If None, items are simply consumed.
+
+    Example:
+        >>> async def example():
+        >>>     await exhaust_iterator(some_async_iterator, queue=my_queue)
+
+    See Also:
+        - :func:`exhaust_iterators`
+        - :func:`as_yielded`
     """
     async for thing in iterator:
         if queue:
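A self-contained sketch of the producer-consumer pattern this docstring describes. `drain_into` is a stand-in for `exhaust_iterator`, written here so the snippet runs without the package; the only requirement it places on `queue` is a `put_nowait` method, matching the updated docstring.

```
import asyncio
from typing import AsyncIterator, Optional

async def drain_into(iterator: AsyncIterator[int], queue: Optional[asyncio.Queue] = None) -> None:
    # Consume the iterator; forward items if a queue-like object was given.
    async for item in iterator:
        if queue:
            queue.put_nowait(item)

async def produce(n: int) -> AsyncIterator[int]:
    for i in range(n):
        await asyncio.sleep(0)
        yield i

async def main() -> None:
    queue: asyncio.Queue = asyncio.Queue()
    await drain_into(produce(3), queue=queue)
    while not queue.empty():
        print(queue.get_nowait())  # -> 0, 1, 2

asyncio.run(main())
```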
@@ -40,12 +51,24 @@ async def exhaust_iterators(
     """
     Asynchronously iterates over multiple async iterators concurrently and optionally places their items into a queue.
 
-    This function leverages asyncio.gather to concurrently exhaust multiple async iterators. It's useful in scenarios where items from multiple async sources need to be processed or collected together, supporting concurrent operations and efficient multitasking.
+    This function leverages :func:`asyncio.gather` to concurrently exhaust multiple async iterators. It's useful in scenarios where items
+    from multiple async sources need to be processed or collected together, supporting concurrent operations and efficient multitasking.
 
     Args:
         iterators: A sequence of async iterators to be exhausted concurrently.
-        queue (Optional[asyncio.Queue]): An optional queue where items from all iterators will be placed. If None, items are simply consumed.
+        queue (Optional[Queue]): An optional queue-like object where items from all iterators will be placed. If None, items are simply consumed.
         join (Optional[bool]): If a queue was provided and join is True, this coroutine will continue to run until all queue items have been processed.
+
+    Raises:
+        ValueError: If `join` is True but no `queue` is provided.
+
+    Example:
+        >>> async def example():
+        >>>     await exhaust_iterators([iterator1, iterator2], queue=my_queue, join=True)
+
+    See Also:
+        - :func:`exhaust_iterator`
+        - :func:`as_yielded`
     """
     for x in await asyncio.gather(
         *[exhaust_iterator(iterator, queue=queue) for iterator in iterators],
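The `join=True` behaviour documented here ("continue to run until all queue items have been processed") mirrors the semantics of `asyncio.Queue.join`. A minimal sketch of those semantics using only the standard library:

```
import asyncio

async def consumer(queue: asyncio.Queue) -> None:
    while True:
        item = await queue.get()
        print("processed", item)
        queue.task_done()  # each task_done() lets queue.join() make progress

async def main() -> None:
    queue: asyncio.Queue = asyncio.Queue()
    worker = asyncio.create_task(consumer(queue))

    for i in range(3):
        queue.put_nowait(i)

    # `join=True` semantics: block until every queued item has been processed.
    await queue.join()
    worker.cancel()

asyncio.run(main())
```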
@@ -167,17 +190,31 @@ async def as_yielded(*iterators: AsyncIterator[T]) -> AsyncIterator[T]:  # type:
     """
     Merges multiple async iterators into a single async iterator that yields items as they become available from any of the source iterators.
 
-    This function is designed to streamline the handling of multiple asynchronous data streams by consolidating them into a single asynchronous iteration context. It enables concurrent fetching and processing of items from multiple sources, improving efficiency and simplifying code structure when dealing with asynchronous operations.
+    This function is designed to streamline the handling of multiple asynchronous data streams by consolidating them into a single asynchronous iteration context.
+    It enables concurrent fetching and processing of items from multiple sources, improving efficiency and simplifying code structure when dealing with asynchronous operations.
 
-    The merging process is facilitated by internally managing a queue where items from the source iterators are placed as they are fetched. This mechanism ensures that the merged stream of items is delivered in an order determined by the availability of items from the source iterators, rather than their original sequence.
+    The merging process is facilitated by the :func:`exhaust_iterators` function, which concurrently processes the source iterators and places their items into a queue.
+    This mechanism ensures that the merged stream of items is delivered in an order determined by the availability of items from the source iterators, rather than their original sequence.
 
-    The function handles exceptions and ensures robustness and reliability by using asyncio tasks and queues. It manages edge cases such as early termination and exception management. The `_Done` sentinel class is used internally to signal the completion of processing.
+    The function handles exceptions and ensures robustness and reliability by using asyncio tasks and queues. It manages edge cases such as early termination and exception management.
+    The :class:`_Done` sentinel class is used internally to signal the completion of processing.
 
     Args:
-        *iterators: Variable length list of AsyncIterator objects to be merged.
+        *iterators: Variable length list of :class:`~collections.abc.AsyncIterator` objects to be merged.
 
     Note:
-        This implementation leverages asyncio tasks and queues to efficiently manage the asynchronous iteration and merging process. It handles edge cases such as early termination and exception management, ensuring robustness and reliability. The `_Done` sentinel class is used internally to signal the completion of processing.
+        This implementation leverages asyncio tasks and queues to efficiently manage the asynchronous iteration and merging process.
+        It handles edge cases such as early termination and exception management, ensuring robustness and reliability.
+        The :class:`_Done` sentinel class is used internally to signal the completion of processing.
+
+    Example:
+        >>> async def example():
+        >>>     async for item in as_yielded(iterator1, iterator2):
+        >>>         print(item)
+
+    See Also:
+        - :func:`exhaust_iterator`
+        - :func:`exhaust_iterators`
     """
     # hypothesis idea: _Done should never be exposed to user, works for all desired input types
     queue: Queue[Union[T, _Done]] = Queue()
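A simplified sketch of the merge mechanism this docstring describes: drain each source into a shared queue concurrently and push a sentinel when everything is exhausted. `merged` and `_DONE` are stand-ins; the real `as_yielded` adds exception propagation and early-termination handling via its `_Done` sentinel.

```
import asyncio
from typing import AsyncIterator, TypeVar

T = TypeVar("T")
_DONE = object()  # simplified stand-in for the library's _Done sentinel

async def merged(*iterators: AsyncIterator[T]) -> AsyncIterator[T]:
    queue: asyncio.Queue = asyncio.Queue()

    async def drain(it: AsyncIterator[T]) -> None:
        async for item in it:
            queue.put_nowait(item)

    async def run_all() -> None:
        # Exhaust all sources concurrently, then signal completion.
        await asyncio.gather(*(drain(it) for it in iterators))
        queue.put_nowait(_DONE)

    task = asyncio.create_task(run_all())
    while (item := await queue.get()) is not _DONE:
        yield item
    await task

async def count(start: int) -> AsyncIterator[int]:
    for i in range(start, start + 3):
        await asyncio.sleep(0)
        yield i

async def main() -> None:
    async for item in merged(count(0), count(10)):
        print(item)  # items interleave as they become available

asyncio.run(main())
```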
@@ -224,17 +261,16 @@ async def as_yielded(*iterators: AsyncIterator[T]) -> AsyncIterator[T]:  # type:
 
 class _Done:
     """
-    A sentinel class used to signal the completion of processing in the as_yielded function.
+    A sentinel class used to signal the completion of processing in the :func:`as_yielded` function.
+
+    This class acts as a marker to indicate that all items have been processed and the asynchronous iteration can be concluded.
+    It is used internally within the implementation of :func:`as_yielded` to efficiently manage the termination of the iteration process once all source iterators have been exhausted.
 
-    This class acts as a marker to indicate that all items have been processed and the asynchronous iteration can be concluded. It is used internally within the implementation of as_yielded to efficiently manage the termination of the iteration process once all source iterators have been exhausted.
+    Args:
+        exc (Optional[Exception]): An optional exception to be associated with the completion.
     """
 
     def __init__(self, exc: Optional[Exception] = None) -> None:
-        """Initializes the _Done sentinel.
-
-        Args:
-            exc (Optional[Exception]): An optional exception to be associated with the completion.
-        """
         self._exc = exc
 
     @property
{ez_a_sync-0.22.15.dist-info → ez_a_sync-0.22.16.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ez-a-sync
-Version: 0.22.15
+Version: 0.22.16
 Summary: A library that makes it easy to define objects that can be used for both sync and async use cases.
 Home-page: https://github.com/BobTheBuidler/a-sync
 Author: BobTheBuidler
ez_a_sync-0.22.16.dist-info/RECORD ADDED
@@ -0,0 +1,74 @@
+a_sync/ENVIRONMENT_VARIABLES.py,sha256=JHTnvIA5rEXWLqMwNeJ2ewFwVqyg9T6CYcrdxWnnfyo,1309
+a_sync/__init__.py,sha256=wJdpi219HqUD4dN-zLK1VNE-3K_R2aa_R98ZnbVILL8,4787
+a_sync/_smart.py,sha256=qd6hkURaVHZZ1i9L88cJDv6SF7MnJM5bmrVGVQ12KuA,11526
+a_sync/_typing.py,sha256=M7ZzA6iUSl8Ur4zdySJLxWKk9CdA_kSHjbV0j72CRgQ,6628
+a_sync/aliases.py,sha256=TbLyuLeFfJEmcC5-NP6h4DQ9QXlQjGny2NUP_x1tflw,212
+a_sync/exceptions.py,sha256=Zm1YG55ApYF1NWciIkRpTgLMlgyCcAk6MadHTgwvhbs,12680
+a_sync/executor.py,sha256=4YzCSWPbas3sPeOJO-S-cwo1dLk-eq20F6bMVqZvxAA,14994
+a_sync/future.py,sha256=PxR1nm5DOYohDE5CWD7HXgzdP_3GDUg0_oFVBLvn7_A,48827
+a_sync/iter.py,sha256=KelwlPfUU5XIPgMpimIV4VwAjiRhOzRc2fK1TG5tTmY,23225
+a_sync/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+a_sync/task.py,sha256=dqi4TA8uIkONqHDMt1DUmotCbmhGwAR6iSbrA-h3SwI,32376
+a_sync/a_sync/__init__.py,sha256=qENRGhOVdj0uwYHqGZwDdTTYDepvpb0qjzVJn7Nco78,1814
+a_sync/a_sync/_descriptor.py,sha256=oO0AEfPVFGhG-YosH3G8-vUJM3ZU7PRwMDGNCP8XjSI,13537
+a_sync/a_sync/_flags.py,sha256=ZxlbF0OdGP-35dc5VjSh1YIGvbailrXaEOg1ZhYsjgs,3640
+a_sync/a_sync/_helpers.py,sha256=Emy4iiWHCw8e0HmqeihgQHKKCkGFax3tRlnUFjz8fCg,3144
+a_sync/a_sync/_kwargs.py,sha256=130eL6M54DUEDERA9Jn8V2kz8AZqraVsNC55ovyvS5s,2008
+a_sync/a_sync/_meta.py,sha256=psCfXKFY85OH8biUr-KT6Ew_YE0UOjficWtPdq4qMCE,9577
+a_sync/a_sync/abstract.py,sha256=OAkIqO6Zqm6QZfrmclpn85nsXJhI0v1aBFQwfwYsqoQ,7347
+a_sync/a_sync/base.py,sha256=rZolx0B4Dof84ahC3pfTDTQe4laZppNnAcliAmNkjDw,7431
+a_sync/a_sync/config.py,sha256=K7KunG2cvOfKic8BzShgvIgOY7GFa0p_S4S1lzPkqeY,5251
+a_sync/a_sync/decorator.py,sha256=O1m6kTBMCjV5coZRLHZoEsjbw0R4DQXD2huDehAi2qg,17039
+a_sync/a_sync/function.py,sha256=06kgIoYISmEc4AbvxL_G_d-3inc7V6atdE5HVZsXowA,40721
+a_sync/a_sync/method.py,sha256=s3nA1VrOo-wwiZd3gEosL1ERYLlps-IAhppgOJptzAw,32448
+a_sync/a_sync/property.py,sha256=llVqhVksV717ejaDBne7DHBuuMUmHdlsW83wcTElQJQ,25203
+a_sync/a_sync/singleton.py,sha256=fdzGGX8QeUTndE_7VQSzvZUe0NAWQ1vWLq_y2Ruo1Ws,2414
+a_sync/a_sync/modifiers/__init__.py,sha256=zi1qI3paHjXxFGx2J2fX5EuWAgvY_-5MYFLwv931he4,4120
+a_sync/a_sync/modifiers/limiter.py,sha256=YCEgmu_Cm06kq16F5YA2wMie9pgS-VZQ3U_WhgkO-Ps,4308
+a_sync/a_sync/modifiers/manager.py,sha256=5VWhsP1xF5zxZwq8q2IICqIRwnqD_lvqVsaVHB_eY3k,7152
+a_sync/a_sync/modifiers/semaphores.py,sha256=a6pUablxdXkSVpXpzUFk1lnfsu0U4yFXAoKZyt-u26Q,6796
+a_sync/a_sync/modifiers/cache/__init__.py,sha256=PMydVB29OhcBqhHGZnRDwdoObIvEYxkbqcbko4ZY8g8,5389
+a_sync/a_sync/modifiers/cache/memory.py,sha256=6w-C7q_K_ccPvYd6Jl0W2xYsg2JKLR_FmrWrfyIebfE,5394
+a_sync/asyncio/__init__.py,sha256=IBAciQTCGADpg3Omwnb9zw-MXUk40Gu6Yo_7ZMZk8sA,6135
+a_sync/asyncio/as_completed.py,sha256=xIvgyuN20JKMFakLlyJ_Vm1LpAZIU5KdQUkkKqMQT20,9402
+a_sync/asyncio/create_task.py,sha256=f3_PPdhK4lqDvJWSiI2SWaV0oa6iYliUlLnWi6JalZM,5078
+a_sync/asyncio/gather.py,sha256=AcqE0RGCCDA9SMtIgIMRZmfm0rjHEkREibA2L40Mx3Y,8072
+a_sync/asyncio/utils.py,sha256=C9XmHO_zU0-Cgob9_YYmqGfyjYyskKqySqyvUcWo7LU,391
+a_sync/primitives/__init__.py,sha256=zpmDwVSUOknQyIOTedyhwRWygiXSmYvjdl83oFAr1Rk,737
+a_sync/primitives/_debug.py,sha256=utZn33eYUzZAlf1VG5OPkBpHVCYV8gdj_YAJHQhvVsM,5073
+a_sync/primitives/_loggable.py,sha256=Ki7lLrTwdCXhnZ18Ns3mEOCJJcLFp8SVntzwzjVFjAM,2067
+a_sync/primitives/queue.py,sha256=BmgYNFm3k3czTkjFwwcH3rTNyUMXDRyEaRpqLohjAc4,30261
+a_sync/primitives/locks/__init__.py,sha256=zKSLeCUgMGSbuXN6ehb2dm0-4lbk-Ze8_ldatWEjy4Y,283
+a_sync/primitives/locks/counter.py,sha256=o7aAnEEG04Yybcbr1FbSpzjVM3JzXwXDvDTlZeNtZ6I,7276
+a_sync/primitives/locks/event.py,sha256=U1suI0OZEH8X3jxF_MChGUtuhemLzROqqd6a7bpHM8E,2974
+a_sync/primitives/locks/prio_semaphore.py,sha256=QNVOVk0m9509viBqEzruze_IKnIKBOarKj_xs42AERA,14755
+a_sync/primitives/locks/semaphore.py,sha256=8EZExO0L_bFCmSFzxaZO9oIqWNTIS4LMnYvukQncZwQ,8120
+a_sync/sphinx/__init__.py,sha256=UvdsakVmkn0Lw4vEd3jA3_Acymde95-78o87lel8ikk,49
+a_sync/sphinx/ext.py,sha256=3ktEWH1brCbHZi2m4BGwglfLyA5JjF05cB0e_c-MVTg,8997
+a_sync/utils/__init__.py,sha256=4JiGSyL5Jj2G7cAmeQN6fxwFnzyOtWYd9IeX8BhrR2k,3223
+a_sync/utils/iterators.py,sha256=jTeLdR5sAqUcB20ZnKZkwVNjJH3QBKgv4kw4yqlV5yI,10723
+tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+tests/conftest.py,sha256=cZPQoOF3h1OtsFzEb7Jkx54COExVdf0XfD0LZBrmqcg,57
+tests/executor.py,sha256=7B1SWNM--gYcqgTgFXL_e0yHGGY2xuC5Fo6hozkOWqw,8594
+tests/fixtures.py,sha256=4j3GsopNo6tMFOyhWOb8Hnwcwiw2Xdjee4r1YLH1JBE,5774
+tests/test_abstract.py,sha256=bYV06gMvdnjU3KQJZEU5pvljgeaGcbnQnIQQn8yuVo8,1158
+tests/test_as_completed.py,sha256=_oy8L4xmK7NvpAaNPLdM6wwPtSD54nGxwQucK9pDvMg,4527
+tests/test_base.py,sha256=FkcDjam4wR3Bm5TDx5ZI6mlEENSXlzDruIP6YIt4_MI,16630
+tests/test_cache.py,sha256=lgcULExF1Nw4cjvujVvxub5CROqatf6TOkluUSVypIY,2562
+tests/test_decorator.py,sha256=OMV2H6JjmGftdIFd5hgHBt80SF29AX8HtvvdyL8covg,3670
+tests/test_executor.py,sha256=OG3cUeTq0MQkMg-hTsAh8qQqEqYMUaslX3xhXE_zkxA,1950
+tests/test_future.py,sha256=9UUFAh6eP1HFkLchprpcnBjKH7TETZr48k-WUt2PKGc,3433
+tests/test_gather.py,sha256=cjl20RIGbLNMn8hhEw9hFQ7qjWpLHIXVDVrAm6H5u4w,1382
+tests/test_helpers.py,sha256=68DBihIMqAIQLkAS89yutfDv3bPsrgIShad7JS6Bqv0,349
+tests/test_iter.py,sha256=jUaRiZMbfOu3HbGPXnijI2C11uXqde62gcPGG71v6m4,8995
+tests/test_limiter.py,sha256=3nyrikkThrTXrx_7J4gR9ZCNXTYIdrgkXF9Av_T1wqc,829
+tests/test_meta.py,sha256=NZyt6tjzypSJO2byY8NuthCXi545SdSeSDp8KBUkp1Q,3663
+tests/test_modified.py,sha256=_O0-HUJLCC4Ok12QtDjT1_OyLJYDgP0K3O0XhrZTGYs,250
+tests/test_semaphore.py,sha256=8WxfS-0eSlrWyRi-x_-KopsGEp_9oX4TgH2QB1DOnCM,1666
+tests/test_singleton.py,sha256=krzO5QOhLIEV4PPpiHnNw-dFisTCemFcggWxh6lXAZw,1033
+tests/test_task.py,sha256=xtTXdKHUEwQnNIZcMcAk_Xto5YJ0G-qt9Pkq9Dfdmwk,10271
+ez_a_sync-0.22.16.dist-info/LICENSE.txt,sha256=1on6-17OUMlja6vSPTcmlmeT_DwujCZJijYxaplBvZk,1075
+ez_a_sync-0.22.16.dist-info/METADATA,sha256=7IGGj1Gj5HHVv7Z5Fu1NZNeKZDgMZW_p2O_ZwRLBjw0,533
+ez_a_sync-0.22.16.dist-info/WHEEL,sha256=R06PA3UVYHThwHvxuRWMqaGcr-PuniXahwjmQRFMEkY,91
+ez_a_sync-0.22.16.dist-info/top_level.txt,sha256=GVK_7kp7dgBLeHp84iIQdsJmiXnrXd-5sIf2x0Q-VKc,13
+ez_a_sync-0.22.16.dist-info/RECORD,,
{ez_a_sync-0.22.15.dist-info → ez_a_sync-0.22.16.dist-info}/WHEEL CHANGED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (75.4.0)
+Generator: setuptools (75.5.0)
 Root-Is-Purelib: true
 Tag: py3-none-any