ez-a-sync 0.22.14__py3-none-any.whl → 0.22.16__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ez-a-sync might be problematic.
- a_sync/ENVIRONMENT_VARIABLES.py +37 -5
- a_sync/__init__.py +53 -12
- a_sync/_smart.py +231 -28
- a_sync/_typing.py +112 -15
- a_sync/a_sync/__init__.py +35 -10
- a_sync/a_sync/_descriptor.py +248 -38
- a_sync/a_sync/_flags.py +78 -9
- a_sync/a_sync/_helpers.py +46 -13
- a_sync/a_sync/_kwargs.py +33 -8
- a_sync/a_sync/_meta.py +149 -28
- a_sync/a_sync/abstract.py +150 -28
- a_sync/a_sync/base.py +34 -16
- a_sync/a_sync/config.py +85 -14
- a_sync/a_sync/decorator.py +441 -139
- a_sync/a_sync/function.py +709 -147
- a_sync/a_sync/method.py +437 -110
- a_sync/a_sync/modifiers/__init__.py +85 -5
- a_sync/a_sync/modifiers/cache/__init__.py +116 -17
- a_sync/a_sync/modifiers/cache/memory.py +130 -20
- a_sync/a_sync/modifiers/limiter.py +101 -22
- a_sync/a_sync/modifiers/manager.py +142 -16
- a_sync/a_sync/modifiers/semaphores.py +121 -15
- a_sync/a_sync/property.py +383 -82
- a_sync/a_sync/singleton.py +44 -19
- a_sync/aliases.py +0 -1
- a_sync/asyncio/__init__.py +140 -1
- a_sync/asyncio/as_completed.py +213 -79
- a_sync/asyncio/create_task.py +70 -20
- a_sync/asyncio/gather.py +125 -58
- a_sync/asyncio/utils.py +3 -3
- a_sync/exceptions.py +248 -26
- a_sync/executor.py +164 -69
- a_sync/future.py +1227 -168
- a_sync/iter.py +173 -56
- a_sync/primitives/__init__.py +14 -2
- a_sync/primitives/_debug.py +72 -18
- a_sync/primitives/_loggable.py +41 -10
- a_sync/primitives/locks/__init__.py +5 -2
- a_sync/primitives/locks/counter.py +107 -38
- a_sync/primitives/locks/event.py +21 -7
- a_sync/primitives/locks/prio_semaphore.py +262 -63
- a_sync/primitives/locks/semaphore.py +138 -89
- a_sync/primitives/queue.py +601 -60
- a_sync/sphinx/__init__.py +0 -1
- a_sync/sphinx/ext.py +160 -50
- a_sync/task.py +313 -112
- a_sync/utils/__init__.py +12 -6
- a_sync/utils/iterators.py +170 -50
- {ez_a_sync-0.22.14.dist-info → ez_a_sync-0.22.16.dist-info}/METADATA +1 -1
- ez_a_sync-0.22.16.dist-info/RECORD +74 -0
- {ez_a_sync-0.22.14.dist-info → ez_a_sync-0.22.16.dist-info}/WHEEL +1 -1
- tests/conftest.py +1 -2
- tests/executor.py +250 -9
- tests/fixtures.py +61 -32
- tests/test_abstract.py +22 -4
- tests/test_as_completed.py +54 -21
- tests/test_base.py +264 -19
- tests/test_cache.py +31 -15
- tests/test_decorator.py +54 -28
- tests/test_executor.py +31 -13
- tests/test_future.py +45 -8
- tests/test_gather.py +8 -2
- tests/test_helpers.py +2 -0
- tests/test_iter.py +55 -13
- tests/test_limiter.py +5 -3
- tests/test_meta.py +23 -9
- tests/test_modified.py +4 -1
- tests/test_semaphore.py +15 -8
- tests/test_singleton.py +28 -11
- tests/test_task.py +162 -36
- ez_a_sync-0.22.14.dist-info/RECORD +0 -74
- {ez_a_sync-0.22.14.dist-info → ez_a_sync-0.22.16.dist-info}/LICENSE.txt +0 -0
- {ez_a_sync-0.22.14.dist-info → ez_a_sync-0.22.16.dist-info}/top_level.txt +0 -0
a_sync/primitives/queue.py
CHANGED
@@ -1,7 +1,16 @@
 """
-This module provides various queue implementations for managing asynchronous tasks
-
-
+This module provides various queue implementations for managing asynchronous tasks.
+It includes standard FIFO queues, priority queues, and processing queues with enhanced functionality.
+
+Classes:
+    Queue: A generic asynchronous queue that extends the functionality of `asyncio.Queue`.
+    ProcessingQueue: A queue designed for processing tasks asynchronously with multiple workers.
+    PriorityProcessingQueue: A priority-based processing queue where tasks are processed based on priority.
+    SmartProcessingQueue: A processing queue that executes jobs with the most waiters first, supporting dynamic priorities.
+
+See Also:
+    `asyncio.Queue`: The base class for asynchronous FIFO queues.
+    `asyncio.PriorityQueue`: The base class for priority queues.
 """
 
 import asyncio
@@ -18,32 +27,137 @@ from a_sync._typing import *
 logger = logging.getLogger(__name__)
 
 if sys.version_info < (3, 9):
+
     class _Queue(asyncio.Queue, Generic[T]):
-        __slots__ = "_maxsize", "_loop", "_getters", "_putters", "_unfinished_tasks", "_finished"
+        __slots__ = (
+            "_maxsize",
+            "_loop",
+            "_getters",
+            "_putters",
+            "_unfinished_tasks",
+            "_finished",
+        )
+
 else:
+
     class _Queue(asyncio.Queue[T]):
         __slots__ = "_maxsize", "_getters", "_putters", "_unfinished_tasks", "_finished"
 
+
 class Queue(_Queue[T]):
-
+    """
+    A generic asynchronous queue that extends the functionality of `asyncio.Queue`.
+
+    This implementation supports retrieving multiple items at once and handling
+    task processing in both FIFO and LIFO order. It provides enhanced type hinting
+    support and additional methods for bulk operations.
+
+    Inherits from:
+        - :class:`~asyncio.Queue`
+
+    Example:
+        >>> queue = Queue()
+        >>> await queue.put(item='task1')
+        >>> await queue.put(item='task2')
+        >>> result = await queue.get()
+        >>> print(result)
+        task1
+        >>> all_tasks = await queue.get_all()
+        >>> print(all_tasks)
+        ['task2']
+    """
+
     async def get(self) -> T:
         self._queue
+        """
+        Asynchronously retrieves and removes the next item from the queue.
+
+        If the queue is empty, this method will block until an item is available.
+
+        Example:
+            >>> result = await queue.get()
+            >>> print(result)
+        """
         return await _Queue.get(self)
+
     def get_nowait(self) -> T:
+        """
+        Retrieves and removes the next item from the queue without blocking.
+
+        This method does not wait for an item to be available and will raise
+        an exception if the queue is empty.
+
+        Raises:
+            :exc:`~asyncio.QueueEmpty`: If the queue is empty.
+
+        Example:
+            >>> result = queue.get_nowait()
+            >>> print(result)
+        """
         return _Queue.get_nowait(self)
+
     async def put(self, item: T) -> None:
+        """
+        Asynchronously adds an item to the queue.
+
+        If the queue is full, this method will block until space is available.
+
+        Args:
+            item: The item to add to the queue.
+
+        Example:
+            >>> await queue.put(item='task')
+        """
         return _Queue.put(self, item)
+
     def put_nowait(self, item: T) -> None:
+        """
+        Adds an item to the queue without blocking.
+
+        This method does not wait for space to be available and will raise
+        an exception if the queue is full.
+
+        Args:
+            item: The item to add to the queue.
+
+        Raises:
+            :exc:`~asyncio.QueueFull`: If the queue is full.
+
+        Example:
+            >>> queue.put_nowait(item='task')
+        """
         return _Queue.put_nowait(self, item)
-
+
     async def get_all(self) -> List[T]:
-        """
+        """
+        Asynchronously retrieves and removes all available items from the queue.
+
+        If the queue is empty, this method will wait until at least one item
+        is available before returning.
+
+        Example:
+            >>> tasks = await queue.get_all()
+            >>> print(tasks)
+        """
         try:
             return self.get_all_nowait()
         except asyncio.QueueEmpty:
             return [await self.get()]
+
     def get_all_nowait(self) -> List[T]:
-        """
+        """
+        Retrieves and removes all available items from the queue without waiting.
+
+        This method does not wait for items to be available and will raise
+        an exception if the queue is empty.
+
+        Raises:
+            :exc:`~asyncio.QueueEmpty`: If the queue is empty.
+
+        Example:
+            >>> tasks = queue.get_all_nowait()
+            >>> print(tasks)
+        """
         values: List[T] = []
         while True:
             try:
@@ -52,20 +166,47 @@ class Queue(_Queue[T]):
                 if not values:
                     raise asyncio.QueueEmpty from e
                 return values
-
+
     async def get_multi(self, i: int, can_return_less: bool = False) -> List[T]:
+        """
+        Asynchronously retrieves up to `i` items from the queue.
+
+        Args:
+            i: The number of items to retrieve.
+            can_return_less: If True, may return fewer than `i` items if queue is emptied.
+
+        Raises:
+            :exc:`~asyncio.QueueEmpty`: If no items are available and fewer items cannot be returned.
+
+        Example:
+            >>> tasks = await queue.get_multi(i=2, can_return_less=True)
+            >>> print(tasks)
+        """
         _validate_args(i, can_return_less)
         items = []
         while len(items) < i and not can_return_less:
             try:
-                items.extend(self.get_multi_nowait(i - len(items), can_return_less=True))
+                items.extend(
+                    self.get_multi_nowait(i - len(items), can_return_less=True)
+                )
             except asyncio.QueueEmpty:
                 items = [await self.get()]
         return items
+
     def get_multi_nowait(self, i: int, can_return_less: bool = False) -> List[T]:
         """
-
-
+        Retrieves up to `i` items from the queue without waiting.
+
+        Args:
+            i: The number of items to retrieve.
+            can_return_less: If True, may return fewer than `i` items if queue is emptied.
+
+        Raises:
+            :exc:`~asyncio.QueueEmpty`: If no items are available and fewer items cannot be returned.
+
+        Example:
+            >>> tasks = queue.get_multi_nowait(i=3, can_return_less=True)
+            >>> print(tasks)
         """
         _validate_args(i, can_return_less)
         items = []
@@ -83,35 +224,84 @@ class Queue(_Queue[T]):
 
 
 class ProcessingQueue(_Queue[Tuple[P, "asyncio.Future[V]"]], Generic[P, V]):
+    """
+    A queue designed for processing tasks asynchronously with multiple workers.
+
+    Each item in the queue is processed by a worker, and tasks can return results
+    via asynchronous futures. This queue is ideal for scenarios where tasks need
+    to be processed concurrently with a fixed number of workers.
+
+    Example:
+        >>> async def process_task(data): return data.upper()
+        >>> queue = ProcessingQueue(func=process_task, num_workers=5)
+        >>> fut = await queue.put(item='task')
+        >>> print(await fut)
+        TASK
+    """
+
     _closed: bool = False
+    """Indicates whether the queue is closed."""
+
     __slots__ = "func", "num_workers", "_worker_coro"
+
     def __init__(
-        self,
-        func: Callable[P, Awaitable[V]],
-        num_workers: int,
-        *,
-        return_data: bool = True,
+        self,
+        func: Callable[P, Awaitable[V]],
+        num_workers: int,
+        *,
+        return_data: bool = True,
         name: str = "",
         loop: Optional[asyncio.AbstractEventLoop] = None,
     ) -> None:
+        """
+        Initializes a processing queue with the given worker function and worker count.
+
+        Args:
+            func: The task function to process.
+            num_workers: Number of workers to process tasks.
+            return_data: Whether tasks should return data via futures. Defaults to True.
+            name: Name of the queue. Defaults to an empty string.
+            loop: Optional event loop for the queue.
+
+        Example:
+            >>> queue = ProcessingQueue(func=my_task_func, num_workers=3, name='myqueue')
+        """
         if sys.version_info < (3, 10):
             super().__init__(loop=loop)
         elif loop:
-            raise NotImplementedError(f"You cannot pass a value for `loop` in python {sys.version_info}")
+            raise NotImplementedError(
+                f"You cannot pass a value for `loop` in python {sys.version_info}"
+            )
         else:
             super().__init__()
-
+
         self.func = func
+        """The function that each worker will process."""
+
         self.num_workers = num_workers
+        """The number of worker tasks for processing."""
+
        self._name = name
+        """Optional name for the queue."""
+
         self._no_futs = not return_data
+        """Indicates whether tasks will return data via futures."""
+
         @functools.wraps(func)
         async def _worker_coro() -> NoReturn:
-
+            """Worker coroutine for processing tasks."""
             return await self.__worker_coro()
+
         self._worker_coro = _worker_coro
+
     # NOTE: asyncio defines both this and __str__
     def __repr__(self) -> str:
+        """
+        Provides a detailed string representation of the queue.
+
+        Example:
+            >>> print(queue)
+        """
         repr_string = f"<{type(self).__name__} at {hex(id(self))}"
         if self._name:
             repr_string += f" name={self._name}"
@@ -119,8 +309,15 @@ class ProcessingQueue(_Queue[Tuple[P, "asyncio.Future[V]"]], Generic[P, V]):
         if self._unfinished_tasks:
             repr_string += f" pending={self._unfinished_tasks}"
         return f"{repr_string}>"
+
     # NOTE: asyncio defines both this and __repr__
     def __str__(self) -> str:
+        """
+        Provides a string representation of the queue.
+
+        Example:
+            >>> print(queue)
+        """
         repr_string = f"<{type(self).__name__}"
         if self._name:
             repr_string += f" name={self._name}"
@@ -128,38 +325,98 @@ class ProcessingQueue(_Queue[Tuple[P, "asyncio.Future[V]"]], Generic[P, V]):
         if self._unfinished_tasks:
             repr_string += f" pending={self._unfinished_tasks}"
         return f"{repr_string}>"
+
     def __call__(self, *args: P.args, **kwargs: P.kwargs) -> "asyncio.Future[V]":
+        """
+        Submits a task to the queue.
+
+        Example:
+            >>> fut = queue(*args, **kwargs)
+            >>> print(fut)
+        """
         return self.put_nowait(*args, **kwargs)
+
     def __del__(self) -> None:
+        """
+        Handles the deletion of the queue, ensuring tasks are handled.
+        """
         if self._closed:
             return
         if self._unfinished_tasks > 0:
             context = {
-                "message": f"{self} was destroyed but has work pending!",
+                "message": f"{self} was destroyed but has work pending!",
             }
             asyncio.get_event_loop().call_exception_handler(context)
+
     @property
     def name(self) -> str:
+        """
+        Returns the name of the queue, or its representation.
+
+        Example:
+            >>> print(queue.name)
+        """
         return self._name or repr(self)
+
     def close(self) -> None:
+        """
+        Closes the queue, preventing further task submissions.
+
+        Example:
+            >>> queue.close()
+        """
         self._closed = True
+
     async def put(self, *args: P.args, **kwargs: P.kwargs) -> "asyncio.Future[V]":
+        """
+        Asynchronously submits a task to the queue.
+
+        Args:
+            args: Positional arguments for the task.
+            kwargs: Keyword arguments for the task.
+
+        Returns:
+            The future result of the task.
+
+        Example:
+            >>> fut = await queue.put(item='task')
+            >>> print(await fut)
+        """
         self._ensure_workers()
         if self._no_futs:
             return await super().put((args, kwargs))
         fut = self._create_future()
         await super().put((args, kwargs, fut))
         return fut
+
     def put_nowait(self, *args: P.args, **kwargs: P.kwargs) -> "asyncio.Future[V]":
+        """
+        Immediately submits a task to the queue without waiting.
+
+        Args:
+            args: Positional arguments for the task.
+            kwargs: Keyword arguments for the task.
+
+        Returns:
+            The future result of the task.
+
+        Example:
+            >>> fut = queue.put_nowait(item='task')
+            >>> print(await fut)
+        """
         self._ensure_workers()
         if self._no_futs:
             return super().put_nowait((args, kwargs))
         fut = self._create_future()
         super().put_nowait((args, kwargs, weakref.proxy(fut)))
         return fut
+
     def _create_future(self) -> "asyncio.Future[V]":
+        """Creates a future for the task."""
         return asyncio.get_event_loop().create_future()
+
     def _ensure_workers(self) -> None:
+        """Ensures that the worker tasks are running."""
         if self._closed:
             raise RuntimeError(f"{type(self).__name__} is closed: ", self) from None
         if self._workers.done():
@@ -170,29 +427,40 @@ class ProcessingQueue(_Queue[Tuple[P, "asyncio.Future[V]"]], Generic[P, V]):
             # re-raise with clean traceback
             try:
                 raise type(exc)(*exc.args).with_traceback(exc.__traceback__)  # type: ignore [union-attr]
-            except TypeError:
-                raise exc.with_traceback(exc.__traceback__)
+            except TypeError as e:
+                raise exc.with_traceback(exc.__traceback__) from e
             # this should never be reached, but just in case
             exc = self._workers.exception()
             try:
                 # re-raise with clean traceback
                 raise type(exc)(*exc.args).with_traceback(exc.__traceback__)  # type: ignore [union-attr]
-            except TypeError:
-                raise exc.with_traceback(exc.__traceback__)
+            except TypeError as e:
+                raise exc.with_traceback(exc.__traceback__) from e
+
     @functools.cached_property
     def _workers(self) -> "asyncio.Task[NoReturn]":
+        """Creates and manages the worker tasks for the queue."""
         logger.debug("starting worker task for %s", self)
         workers = [
             create_task(
-                coro=self._worker_coro(),
+                coro=self._worker_coro(),
                 name=f"{self.name} [Task-{i}]",
                 log_destroy_pending=False,
-            ) for i in range(self.num_workers)
+            )
+            for i in range(self.num_workers)
         ]
-        task = create_task(asyncio.gather(*workers), name=f"{self.name} worker main Task", log_destroy_pending=False)
+        task = create_task(
+            asyncio.gather(*workers),
+            name=f"{self.name} worker main Task",
+            log_destroy_pending=False,
+        )
         task._workers = workers
         return task
+
     async def __worker_coro(self) -> NoReturn:
+        """
+        The coroutine executed by worker tasks to process the queue.
+        """
         args: P.args
         kwargs: P.kwargs
         if self._no_futs:
@@ -217,15 +485,27 @@ class ProcessingQueue(_Queue[Tuple[P, "asyncio.Future[V]"]], Generic[P, V]):
                         result = await self.func(*args, **kwargs)
                         fut.set_result(result)
                     except asyncio.exceptions.InvalidStateError:
-                        logger.error("cannot set result for %s %s: %s", self.func.__name__, fut, result)
+                        logger.error(
+                            "cannot set result for %s %s: %s",
+                            self.func.__name__,
+                            fut,
+                            result,
+                        )
                     except Exception as e:
                         try:
                             fut.set_exception(e)
                         except asyncio.exceptions.InvalidStateError:
-                            logger.error("cannot set exception for %s %s: %s", self.func.__name__, fut, e)
+                            logger.error(
+                                "cannot set exception for %s %s: %s",
+                                self.func.__name__,
+                                fut,
+                                e,
+                            )
                     self.task_done()
                 except Exception as e:
-                    logger.error("%s for %s is broken!!!", type(self).__name__, self.func)
+                    logger.error(
+                        "%s for %s is broken!!!", type(self).__name__, self.func
+                    )
                     logger.exception(e)
                     raise
 
@@ -235,97 +515,308 @@ def _validate_args(i: int, can_return_less: bool) -> None:
     Validates the arguments for methods that retrieve multiple items from the queue.
 
     Args:
-        i
-        can_return_less
+        i: The number of items to retrieve.
+        can_return_less: Whether the method is allowed to return fewer than `i` items.
 
     Raises:
-        TypeError
-        ValueError
+        :exc:`~TypeError`: If `i` is not an integer or `can_return_less` is not a boolean.
+        :exc:`~ValueError`: If `i` is not greater than 1.
+
+    Example:
+        >>> _validate_args(i=2, can_return_less=False)
     """
     if not isinstance(i, int):
         raise TypeError(f"`i` must be an integer greater than 1. You passed {i}")
     if not isinstance(can_return_less, bool):
-        raise TypeError(f"`can_return_less` must be boolean. You passed {can_return_less}")
+        raise TypeError(
+            f"`can_return_less` must be boolean. You passed {can_return_less}"
+        )
     if i <= 1:
         raise ValueError(f"`i` must be an integer greater than 1. You passed {i}")
 
 
-
 class _SmartFutureRef(weakref.ref, Generic[T]):
+    """
+    Weak reference for :class:`~_smart.SmartFuture` objects used in priority queues.
+
+    See Also:
+        :class:`~_smart.SmartFuture`
+    """
+
     def __lt__(self, other: "_SmartFutureRef[T]") -> bool:
         """
         Compares two weak references to SmartFuture objects for ordering.
 
-        This comparison is used in priority queues to determine the order of processing. A SmartFuture
+        This comparison is used in priority queues to determine the order of processing. A SmartFuture
         reference is considered less than another if it has more waiters or if it has been garbage collected.
 
         Args:
-            other
+            other: The other SmartFuture reference to compare with.
 
         Returns:
             bool: True if this reference is less than the other, False otherwise.
+
+        Example:
+            >>> ref1 = _SmartFutureRef(fut1)
+            >>> ref2 = _SmartFutureRef(fut2)
+            >>> print(ref1 < ref2)
         """
         strong_self = self()
         if strong_self is None:
             return True
         strong_other = other()
-        if strong_other is None:
-            return False
-        return strong_self < strong_other
+        return False if strong_other is None else strong_self < strong_other
+
 
 class _PriorityQueueMixin(Generic[T]):
+    """
+    Mixin for creating priority queue functionality with support for custom comparison.
+
+    See Also:
+        :class:`~asyncio.PriorityQueue`
+    """
+
     def _init(self, maxsize):
+        """
+        Initializes the priority queue.
+
+        Example:
+            >>> queue._init(maxsize=10)
+        """
         self._queue: List[T] = []
+
     def _put(self, item, heappush=heapq.heappush):
+        """
+        Adds an item to the priority queue based on its priority.
+
+        Example:
+            >>> queue._put(item='task')
+        """
        heappush(self._queue, item)
+
     def _get(self, heappop=heapq.heappop):
+        """
+        Retrieves the highest priority item from the queue.
+
+        Example:
+            >>> task = queue._get()
+            >>> print(task)
+        """
         return heappop(self._queue)
 
+
 class PriorityProcessingQueue(_PriorityQueueMixin[T], ProcessingQueue[T, V]):
-
-    async def put(self, priority: Any, *args: P.args, **kwargs: P.kwargs) -> "asyncio.Future[V]":
+    """
+    A priority-based processing queue where tasks are processed based on priority.
+
+    This queue allows tasks to be added with a specified priority, ensuring that
+    higher priority tasks are processed before lower priority ones. It is ideal
+    for scenarios where task prioritization is crucial.
+
+    Example:
+        >>> async def process_task(data): return data.upper()
+        >>> queue = PriorityProcessingQueue(func=process_task, num_workers=5)
+        >>> fut = await queue.put(priority=1, item='task')
+        >>> print(await fut)
+        TASK
+
+    See Also:
+        :class:`~ProcessingQueue`
+    """
+
+    async def put(
+        self, priority: Any, *args: P.args, **kwargs: P.kwargs
+    ) -> "asyncio.Future[V]":
+        """
+        Asynchronously adds a task with priority to the queue.
+
+        Args:
+            priority: The priority of the task.
+            args: Positional arguments for the task.
+            kwargs: Keyword arguments for the task.
+
+        Returns:
+            The future representing the result of the task.
+
+        Example:
+            >>> fut = await queue.put(priority=1, item='task')
+            >>> print(await fut)
+        """
         self._ensure_workers()
         fut = asyncio.get_event_loop().create_future()
         await super().put(self, (priority, args, kwargs, fut))
         return fut
-    def put_nowait(self, priority: Any, *args: P.args, **kwargs: P.kwargs) -> "asyncio.Future[V]":
+
+    def put_nowait(
+        self, priority: Any, *args: P.args, **kwargs: P.kwargs
+    ) -> "asyncio.Future[V]":
+        """
+        Immediately adds a task with priority to the queue without waiting.
+
+        Args:
+            priority: The priority of the task.
+            args: Positional arguments for the task.
+            kwargs: Keyword arguments for the task.
+
+        Returns:
+            The future representing the result of the task.
+
+        Example:
+            >>> fut = queue.put_nowait(priority=1, item='task')
+            >>> print(await fut)
+        """
         self._ensure_workers()
         fut = self._create_future()
         super().put_nowait(self, (priority, args, kwargs, fut))
         return fut
+
     def _get(self, heappop=heapq.heappop):
+        """
+        Retrieves the highest priority task from the queue.
+
+        Returns:
+            The priority, task arguments, keyword arguments, and future of the task.
+
+        Example:
+            >>> task = queue._get()
+            >>> print(task)
+        """
         priority, args, kwargs, fut = heappop(self._queue)
         return args, kwargs, fut
 
+
 class _VariablePriorityQueueMixin(_PriorityQueueMixin[T]):
+    """
+    Mixin for priority queues where task priorities can be updated dynamically.
+
+    See Also:
+        :class:`~_PriorityQueueMixin`
+    """
+
     def _get(self, heapify=heapq.heapify, heappop=heapq.heappop):
-        "
-
+        """
+        Resorts the priority queue to consider any changes in priorities and retrieves the task with the highest updated priority.
+
+        Args:
+            heapify: Function to resort the heap.
+            heappop: Function to pop the highest priority task.
+
+        Returns:
+            The highest priority task in the queue.
+
+        Example:
+            >>> task = queue._get()
+            >>> print(task)
+        """
         heapify(self._queue)
         # take the job with the most waiters
         return heappop(self._queue)
+
     def _get_key(self, *args, **kwargs) -> _smart._Key:
+        """
+        Generates a unique key for task identification based on arguments.
+
+        Args:
+            args: Positional arguments for the task.
+            kwargs: Keyword arguments for the task.
+
+        Returns:
+            The generated key for the task.
+
+        Example:
+            >>> key = queue._get_key(*args, **kwargs)
+            >>> print(key)
+        """
         return (args, tuple((kwarg, kwargs[kwarg]) for kwarg in sorted(kwargs)))
 
+
 class VariablePriorityQueue(_VariablePriorityQueueMixin[T], asyncio.PriorityQueue):
-    """
-    A PriorityQueue subclass that allows priorities to be updated (or computed) on the fly
-    """
-
-class SmartProcessingQueue(_VariablePriorityQueueMixin[T], ProcessingQueue[Concatenate[T, P], V]):
+    """
+    A :class:`~asyncio.PriorityQueue` subclass that allows priorities to be updated (or computed) on the fly.
+
+    This queue supports dynamic priority updates, making it suitable for tasks
+    where priorities may change over time. It ensures that tasks are processed
+    based on the most current priority.
+
+    Example:
+        >>> queue = VariablePriorityQueue()
+        >>> queue.put_nowait((1, 'task1'))
+        >>> queue.put_nowait((2, 'task2'))
+        >>> task = queue.get_nowait()
+        >>> print(task)
+
+    See Also:
+        :class:`~asyncio.PriorityQueue`
+    """
+
+
+class SmartProcessingQueue(
+    _VariablePriorityQueueMixin[T], ProcessingQueue[Concatenate[T, P], V]
+):
+    """
+    A processing queue that will execute jobs with the most waiters first, supporting dynamic priorities.
+
+    This queue is designed to handle tasks with dynamic priorities, ensuring that
+    tasks with the most waiters are prioritized. It is ideal for scenarios where
+    task execution order is influenced by the number of waiters.
+
+    Example:
+        >>> async def process_task(data): return data.upper()
+        >>> queue = SmartProcessingQueue(func=process_task, num_workers=5)
+        >>> fut = await queue.put(item='task')
+        >>> print(await fut)
+        TASK
+
+    See Also:
+        :class:`~ProcessingQueue`
+    """
+
     _no_futs = False
-
+    """Whether smart futures are used."""
+
+    _futs: "weakref.WeakValueDictionary[_smart._Key[T], _smart.SmartFuture[T]]"
+    """
+    Weak reference dictionary for managing smart futures.
+    """
+
     def __init__(
-        self,
-        func: Callable[Concatenate[T, P], Awaitable[V]],
-        num_workers: int,
-        *,
+        self,
+        func: Callable[Concatenate[T, P], Awaitable[V]],
+        num_workers: int,
+        *,
         name: str = "",
         loop: Optional[asyncio.AbstractEventLoop] = None,
     ) -> None:
+        """
+        Initializes a smart processing queue with the given worker function.
+
+        Args:
+            func: The worker function.
+            num_workers: Number of worker tasks.
+            name: Optional name for the queue.
+            loop: Optional event loop.
+
+        Example:
+            >>> queue = SmartProcessingQueue(func=my_task_func, num_workers=3, name='smart_queue')
+        """
         super().__init__(func, num_workers, return_data=True, name=name, loop=loop)
-        self._futs
+        self._futs = weakref.WeakValueDictionary()
+
     async def put(self, *args: P.args, **kwargs: P.kwargs) -> _smart.SmartFuture[V]:
+        """
+        Asynchronously adds a task with smart future handling to the queue.
+
+        Args:
+            args: Positional arguments for the task.
+            kwargs: Keyword arguments for the task.
+
+        Returns:
+            The future representing the task's result.
+
+        Example:
+            >>> fut = await queue.put(item='task')
+            >>> print(await fut)
+        """
         self._ensure_workers()
         key = self._get_key(*args, **kwargs)
         if fut := self._futs.get(key, None):
@@ -334,7 +825,22 @@ class SmartProcessingQueue(_VariablePriorityQueueMixin[T], ProcessingQueue[Concatenate[T, P], V]):
         self._futs[key] = fut
         await Queue.put(self, (_SmartFutureRef(fut), args, kwargs))
         return fut
+
     def put_nowait(self, *args: P.args, **kwargs: P.kwargs) -> _smart.SmartFuture[V]:
+        """
+        Immediately adds a task with smart future handling to the queue without waiting.
+
+        Args:
+            args: Positional arguments for the task.
+            kwargs: Keyword arguments for the task.
+
+        Returns:
+            The future representing the task's result.
+
+        Example:
+            >>> fut = queue.put_nowait(item='task')
+            >>> print(await fut)
+        """
         self._ensure_workers()
         key = self._get_key(*args, **kwargs)
         if fut := self._futs.get(key, None):
@@ -343,12 +849,37 @@ class SmartProcessingQueue(_VariablePriorityQueueMixin[T], ProcessingQueue[Concatenate[T, P], V]):
         self._futs[key] = fut
         Queue.put_nowait(self, (_SmartFutureRef(fut), args, kwargs))
         return fut
+
     def _create_future(self, key: _smart._Key) -> "asyncio.Future[V]":
+        """Creates a smart future for the task."""
         return _smart.create_future(queue=self, key=key, loop=self._loop)
+
     def _get(self):
+        """
+        Retrieves the task with the highest priority from the queue.
+
+        Returns:
+            The priority, task arguments, keyword arguments, and future of the task.
+
+        Example:
+            >>> task = queue._get()
+            >>> print(task)
+        """
         fut, args, kwargs = super()._get()
         return args, kwargs, fut()
+
     async def __worker_coro(self) -> NoReturn:
+        """
+        Worker coroutine responsible for processing tasks in the queue.
+
+        Retrieves tasks, executes them, and sets the results or exceptions for the futures.
+
+        Raises:
+            Any: Exceptions raised during task processing are logged.
+
+        Example:
+            >>> await queue.__worker_coro()
+        """
         args: P.args
         kwargs: P.kwargs
         fut: _smart.SmartFuture[V]
@@ -364,13 +895,23 @@ class SmartProcessingQueue(_VariablePriorityQueueMixin[T], ProcessingQueue[Concatenate[T, P], V]):
                     result = await self.func(*args, **kwargs)
                     fut.set_result(result)
                 except asyncio.exceptions.InvalidStateError:
-                    logger.error("cannot set result for %s %s: %s", self.func.__name__, fut, result)
+                    logger.error(
+                        "cannot set result for %s %s: %s",
+                        self.func.__name__,
+                        fut,
+                        result,
+                    )
                 except Exception as e:
                     logger.debug("%s: %s", type(e).__name__, e)
                     try:
                         fut.set_exception(e)
                     except asyncio.exceptions.InvalidStateError:
-                        logger.error("cannot set exception for %s %s: %s", self.func.__name__, fut, e)
+                        logger.error(
+                            "cannot set exception for %s %s: %s",
+                            self.func.__name__,
+                            fut,
+                            e,
+                        )
                 self.task_done()
             except Exception as e:
                 logger.error("%s for %s is broken!!!", type(self).__name__, self.func)