ez-a-sync 0.22.14__py3-none-any.whl → 0.22.15__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ez-a-sync might be problematic; see the registry's advisory page for more details.
- a_sync/ENVIRONMENT_VARIABLES.py +4 -3
- a_sync/__init__.py +30 -12
- a_sync/_smart.py +132 -28
- a_sync/_typing.py +56 -12
- a_sync/a_sync/__init__.py +35 -10
- a_sync/a_sync/_descriptor.py +74 -26
- a_sync/a_sync/_flags.py +14 -6
- a_sync/a_sync/_helpers.py +8 -7
- a_sync/a_sync/_kwargs.py +3 -2
- a_sync/a_sync/_meta.py +120 -28
- a_sync/a_sync/abstract.py +102 -28
- a_sync/a_sync/base.py +34 -16
- a_sync/a_sync/config.py +47 -13
- a_sync/a_sync/decorator.py +239 -117
- a_sync/a_sync/function.py +416 -146
- a_sync/a_sync/method.py +197 -59
- a_sync/a_sync/modifiers/__init__.py +47 -5
- a_sync/a_sync/modifiers/cache/__init__.py +46 -17
- a_sync/a_sync/modifiers/cache/memory.py +86 -20
- a_sync/a_sync/modifiers/limiter.py +52 -22
- a_sync/a_sync/modifiers/manager.py +98 -16
- a_sync/a_sync/modifiers/semaphores.py +48 -15
- a_sync/a_sync/property.py +383 -82
- a_sync/a_sync/singleton.py +1 -0
- a_sync/aliases.py +0 -1
- a_sync/asyncio/__init__.py +4 -1
- a_sync/asyncio/as_completed.py +177 -49
- a_sync/asyncio/create_task.py +31 -17
- a_sync/asyncio/gather.py +72 -52
- a_sync/asyncio/utils.py +3 -3
- a_sync/exceptions.py +78 -23
- a_sync/executor.py +118 -71
- a_sync/future.py +575 -158
- a_sync/iter.py +110 -50
- a_sync/primitives/__init__.py +14 -2
- a_sync/primitives/_debug.py +13 -13
- a_sync/primitives/_loggable.py +5 -4
- a_sync/primitives/locks/__init__.py +5 -2
- a_sync/primitives/locks/counter.py +38 -36
- a_sync/primitives/locks/event.py +21 -7
- a_sync/primitives/locks/prio_semaphore.py +182 -62
- a_sync/primitives/locks/semaphore.py +78 -77
- a_sync/primitives/queue.py +560 -58
- a_sync/sphinx/__init__.py +0 -1
- a_sync/sphinx/ext.py +160 -50
- a_sync/task.py +262 -97
- a_sync/utils/__init__.py +12 -6
- a_sync/utils/iterators.py +127 -43
- {ez_a_sync-0.22.14.dist-info → ez_a_sync-0.22.15.dist-info}/METADATA +1 -1
- ez_a_sync-0.22.15.dist-info/RECORD +74 -0
- {ez_a_sync-0.22.14.dist-info → ez_a_sync-0.22.15.dist-info}/WHEEL +1 -1
- tests/conftest.py +1 -2
- tests/executor.py +112 -9
- tests/fixtures.py +61 -32
- tests/test_abstract.py +7 -4
- tests/test_as_completed.py +54 -21
- tests/test_base.py +66 -17
- tests/test_cache.py +31 -15
- tests/test_decorator.py +54 -28
- tests/test_executor.py +8 -13
- tests/test_future.py +45 -8
- tests/test_gather.py +8 -2
- tests/test_helpers.py +2 -0
- tests/test_iter.py +55 -13
- tests/test_limiter.py +5 -3
- tests/test_meta.py +23 -9
- tests/test_modified.py +4 -1
- tests/test_semaphore.py +15 -8
- tests/test_singleton.py +15 -10
- tests/test_task.py +126 -28
- ez_a_sync-0.22.14.dist-info/RECORD +0 -74
- {ez_a_sync-0.22.14.dist-info → ez_a_sync-0.22.15.dist-info}/LICENSE.txt +0 -0
- {ez_a_sync-0.22.14.dist-info → ez_a_sync-0.22.15.dist-info}/top_level.txt +0 -0
a_sync/exceptions.py
CHANGED
@@ -15,27 +15,40 @@ class ASyncFlagException(ValueError):
     """
     Base exception class for flag-related errors in the a_sync library.
     """
-
-
-
-
-
-
+
+    viable_flags = VIABLE_FLAGS
+    """
+    The set of viable flags.
+
+    A-Sync uses 'flags' to indicate whether objects / fn calls will be sync or async.
+    You can use any of the provided flags, whichever makes most sense for your use case.
+    """
 
     def desc(self, target) -> str:
-
+        """
+        Returns a description of the target for the flag error message.
+
+        Args:
+            target: The target object or string to describe.
+
+        Returns:
+            A string description of the target.
+        """
+        if target == "kwargs":
             return "flags present in 'kwargs'"
         else:
-            return f
+            return f"flag attributes defined on {target}"
+
 
 class NoFlagsFound(ASyncFlagException):
     """
     Raised when no viable flags are found in the target.
     """
+
     def __init__(self, target, kwargs_keys=None):
         """
         Initializes the NoFlagsFound exception.
-
+
         Args:
             target: The target object where flags were expected.
             kwargs_keys: Optional; keys in the kwargs if applicable.
@@ -47,14 +60,16 @@ class NoFlagsFound(ASyncFlagException):
         err += "\nThis is likely an issue with a custom subclass definition."
         super().__init__(err)
 
+
 class TooManyFlags(ASyncFlagException):
     """
     Raised when multiple flags are found, but only one was expected.
     """
+
     def __init__(self, target, present_flags):
         """
         Initializes the TooManyFlags exception.
-
+
         Args:
             target: The target object where flags were found.
             present_flags: The flags that were found.
@@ -64,14 +79,16 @@ class TooManyFlags(ASyncFlagException):
         err += "This is likely an issue with a custom subclass definition."
         super().__init__(err)
 
+
 class InvalidFlag(ASyncFlagException):
     """
     Raised when an invalid flag is encountered.
     """
+
     def __init__(self, flag: Optional[str]):
         """
         Initializes the InvalidFlag exception.
-
+
         Args:
             flag: The invalid flag.
         """
@@ -79,28 +96,32 @@ class InvalidFlag(ASyncFlagException):
         err += "\nThis code should not be reached and likely indicates an issue with a custom subclass definition."
         super().__init__(err)
 
+
 class InvalidFlagValue(ASyncFlagException):
     """
     Raised when a flag has an invalid value.
     """
+
     def __init__(self, flag: str, flag_value: Any):
         """
         Initializes the InvalidFlagValue exception.
-
+
         Args:
             flag: The flag with an invalid value.
             flag_value: The invalid value of the flag.
         """
         super().__init__(f"'{flag}' should be boolean. You passed {flag_value}.")
 
+
 class FlagNotDefined(ASyncFlagException):
     """
     Raised when a flag is not defined on an object.
     """
+
     def __init__(self, obj: Type, flag: str):
         """
         Initializes the FlagNotDefined exception.
-
+
         Args:
             obj: The object where the flag is not defined.
             flag: The undefined flag.
@@ -113,47 +134,62 @@ class ImproperFunctionType(ValueError):
     Raised when a function that should be sync is async or vice-versa.
     """
 
+
 class FunctionNotAsync(ImproperFunctionType):
     """
     Raised when a function expected to be async is not.
     """
+
     def __init__(self, fn):
         """
         Initializes the FunctionNotAsync exception.
-
+
         Args:
             fn: The function that is not async.
         """
-        super().__init__(
+        super().__init__(
+            f"`coro_fn` must be a coroutine function defined with `async def`. You passed {fn}."
+        )
+
 
 class FunctionNotSync(ImproperFunctionType):
     """
     Raised when a function expected to be sync is not.
     """
+
     def __init__(self, fn):
         """
         Initializes the FunctionNotSync exception.
-
+
         Args:
             fn: The function that is not sync.
         """
-        super().__init__(
-
+        super().__init__(
+            f"`func` must be a coroutine function defined with `def`. You passed {fn}."
+        )
+
+
 class ASyncRuntimeError(RuntimeError):
+    """
+    Raised for runtime errors in asynchronous operations.
+    """
+
     def __init__(self, e: RuntimeError):
         """
         Initializes the ASyncRuntimeError exception.
-
+
         Args:
             e: The original runtime error.
         """
         super().__init__(str(e))
 
+
 class SyncModeInAsyncContextError(ASyncRuntimeError):
     """
     Raised when synchronous code is used within an asynchronous context.
     """
-
+
+    def __init__(self, err: str = ""):
         """
         Initializes the SyncModeInAsyncContextError exception.
         """
@@ -163,16 +199,18 @@ class SyncModeInAsyncContextError(ASyncRuntimeError):
         err += f"{VIABLE_FLAGS}"
         super().__init__(err)
 
+
 class MappingError(Exception):
     """
     Base class for errors related to :class:`~TaskMapping`.
     """
+
     _msg: str
 
-    def __init__(self, mapping: "TaskMapping", msg: str =
+    def __init__(self, mapping: "TaskMapping", msg: str = ""):
         """
         Initializes the MappingError exception.
-
+
         Args:
             mapping: The TaskMapping where the error occurred.
             msg: An optional message describing the error.
@@ -183,25 +221,42 @@ class MappingError(Exception):
         super().__init__(msg)
         self.mapping = mapping
 
+
 class MappingIsEmptyError(MappingError):
     """
     Raised when a TaskMapping is empty and an operation requires it to have items.
     """
+
     _msg = "TaskMapping does not contain anything to yield"
 
+
 class MappingNotEmptyError(MappingError):
     """
     Raised when a TaskMapping is not empty and an operation requires it to be empty.
     """
+
     _msg = "TaskMapping already contains some data. In order to use `map`, you need a fresh one"
 
+
 class PersistedTaskException(Exception):
+    """
+    Raised when an exception persists in an asyncio Task.
+    """
+
     def __init__(self, exc: E, task: asyncio.Task) -> None:
+        """
+        Initializes the PersistedTaskException exception.
+
+        Args:
+            exc: The exception that persisted.
+            task: The asyncio Task where the exception occurred.
+        """
        super().__init__(f"{exc.__class__.__name__}: {exc}", task)
        self.exception = exc
        self.task = task
 
+
 class EmptySequenceError(ValueError):
     """
-    Raised when an operation is attempted on an empty sequence but items are
+    Raised when an operation is attempted on an empty sequence but items are required.
     """
a_sync/executor.py
CHANGED
@@ -1,13 +1,13 @@
 """
 With these executors, you can simply run sync functions in your executor with `await executor.run(fn, *args)`.
 
-`executor.submit(fn, *args)` will work the same as the concurrent.futures implementation, but will return an asyncio.Future instead of a concurrent.futures.Future
+`executor.submit(fn, *args)` will work the same as the concurrent.futures implementation, but will return an asyncio.Future instead of a concurrent.futures.Future.
 
 This module provides several executor classes:
-- _AsyncExecutorMixin: A mixin providing asynchronous run and submit methods.
+- _AsyncExecutorMixin: A mixin providing asynchronous run and submit methods, with support for synchronous mode.
 - AsyncProcessPoolExecutor: An async process pool executor.
 - AsyncThreadPoolExecutor: An async thread pool executor.
-- PruningThreadPoolExecutor: A thread pool executor that prunes inactive threads after a timeout.
+- PruningThreadPoolExecutor: A thread pool executor that prunes inactive threads after a timeout, ensuring at least one thread remains active.
 """
 
 import asyncio
@@ -27,42 +27,45 @@ TEN_MINUTES = 60 * 10
 
 Initializer = Callable[..., object]
 
+
 class _AsyncExecutorMixin(cf.Executor, _DebugDaemonMixin):
     """
     A mixin for Executors to provide asynchronous run and submit methods.
     """
+
     _max_workers: int
+
     _workers: str
+    """The type of workers used."""
+
     __slots__ = "_max_workers", "_initializer", "_initargs", "_broken", "_shutdown_lock"
 
-    async def run(self, fn: Callable[P, T], *args: P.args, **kwargs: P.kwargs)
+    async def run(self, fn: Callable[P, T], *args: P.args, **kwargs: P.kwargs):
         """
-        A shorthand way to call `await asyncio.get_event_loop().run_in_executor(this_executor, fn, *args)
+        A shorthand way to call `await asyncio.get_event_loop().run_in_executor(this_executor, fn, *args)`.
         Doesn't `await this_executor.run(fn, *args)` look so much better?
-
+
         Oh, and you can also use kwargs!
 
         Args:
-            fn
+            fn: The function to run.
             *args: Positional arguments for the function.
             **kwargs: Keyword arguments for the function.
-
-        Returns:
-            T: The result of the function.
         """
-        return
+        return (
+            fn(*args, **kwargs)
+            if self.sync_mode
+            else await self.submit(fn, *args, **kwargs)
+        )
 
     def submit(self, fn: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> "asyncio.Future[T]":  # type: ignore [override]
         """
         Submits a job to the executor and returns an asyncio.Future that can be awaited for the result without blocking.
 
         Args:
-            fn
+            fn: The function to submit.
             *args: Positional arguments for the function.
             **kwargs: Keyword arguments for the function.
-
-        Returns:
-            asyncio.Future[T]: The future representing the result of the function.
         """
         if self.sync_mode:
             fut = asyncio.get_event_loop().create_future()
@@ -86,9 +89,6 @@ class _AsyncExecutorMixin(cf.Executor, _DebugDaemonMixin):
     def sync_mode(self) -> bool:
         """
         Indicates if the executor is in synchronous mode (max_workers == 0).
-
-        Returns:
-            bool: True if in synchronous mode, False otherwise.
         """
         return self._max_workers == 0
 
@@ -96,9 +96,6 @@ class _AsyncExecutorMixin(cf.Executor, _DebugDaemonMixin):
     def worker_count_current(self) -> int:
         """
         Returns the current number of workers.
-
-        Returns:
-            int: The current number of workers.
         """
         return len(getattr(self, f"_{self._workers}"))
 
@@ -107,14 +104,14 @@ class _AsyncExecutorMixin(cf.Executor, _DebugDaemonMixin):
         Runs until manually cancelled by the finished work item.
 
         Args:
-            fut
+            fut: The future being debugged.
             fn: The function being executed.
             *args: Positional arguments for the function.
             **kwargs: Keyword arguments for the function.
         """
         # TODO: make prettier strings for other types
         if type(fn).__name__ == "function":
-            fnid = getattr(fn,
+            fnid = getattr(fn, "__qualname__", fn.__name__)
            if fn.__module__:
                fnid = f"{fn.__module__}.{fnid}"
        else:
@@ -125,27 +122,44 @@ class _AsyncExecutorMixin(cf.Executor, _DebugDaemonMixin):
            msg = f"{msg[:-1]} {', '.join(f'{k}={v}' for k, v in kwargs.items())})"
        else:
            msg = f"{msg[:-2]})"
-
+
        while not fut.done():
            await asyncio.sleep(15)
            if not fut.done():
                self.logger.debug(msg, self, fnid)
-
+
+
 # Process
 
+
 class AsyncProcessPoolExecutor(_AsyncExecutorMixin, cf.ProcessPoolExecutor):
     """
     An async process pool executor that allows use of kwargs.
+
+    Attributes:
+        _workers:
     """
+
     _workers = "processes"
-
-
-
+    """The type of workers used, set to "processes"."""
+
+    __slots__ = (
+        "_mp_context",
+        "_processes",
+        "_pending_work_items",
+        "_call_queue",
+        "_result_queue",
+        "_queue_management_thread",
+        "_queue_count",
+        "_shutdown_thread",
+        "_work_ids",
+        "_queue_management_thread_wakeup",
+    )
 
     def __init__(
-        self,
-        max_workers: Optional[int] = None,
-        mp_context: Optional[multiprocessing.context.BaseContext] = None,
+        self,
+        max_workers: Optional[int] = None,
+        mp_context: Optional[multiprocessing.context.BaseContext] = None,
         initializer: Optional[Initializer] = None,
         initargs: Tuple[Any, ...] = (),
     ) -> None:
@@ -153,10 +167,10 @@ class AsyncProcessPoolExecutor(_AsyncExecutorMixin, cf.ProcessPoolExecutor):
         Initializes the AsyncProcessPoolExecutor.
 
         Args:
-            max_workers
-            mp_context
-            initializer
-            initargs
+            max_workers: The maximum number of workers. Defaults to None.
+            mp_context: The multiprocessing context. Defaults to None.
+            initializer: An initializer callable. Defaults to None.
+            initargs: Arguments for the initializer. Defaults to ().
         """
         if max_workers == 0:
             super().__init__(1, mp_context, initializer, initargs)
@@ -164,19 +178,30 @@ class AsyncProcessPoolExecutor(_AsyncExecutorMixin, cf.ProcessPoolExecutor):
         else:
             super().__init__(max_workers, mp_context, initializer, initargs)
 
+
 # Thread
 
+
 class AsyncThreadPoolExecutor(_AsyncExecutorMixin, cf.ThreadPoolExecutor):
     """
     An async thread pool executor that allows use of kwargs.
     """
+
     _workers = "threads"
-
+    """The type of workers used, set to "threads"."""
+
+    __slots__ = (
+        "_work_queue",
+        "_idle_semaphore",
+        "_threads",
+        "_shutdown",
+        "_thread_name_prefix",
+    )
 
     def __init__(
-        self,
-        max_workers: Optional[int] = None,
-        thread_name_prefix: str =
+        self,
+        max_workers: Optional[int] = None,
+        thread_name_prefix: str = "",
         initializer: Optional[Initializer] = None,
         initargs: Tuple[Any, ...] = (),
     ) -> None:
@@ -184,24 +209,28 @@ class AsyncThreadPoolExecutor(_AsyncExecutorMixin, cf.ThreadPoolExecutor):
         Initializes the AsyncThreadPoolExecutor.
 
         Args:
-            max_workers
-            thread_name_prefix
-            initializer
-            initargs
+            max_workers: The maximum number of workers. Defaults to None.
+            thread_name_prefix: Prefix for thread names. Defaults to ''.
+            initializer: An initializer callable. Defaults to None.
+            initargs: Arguments for the initializer. Defaults to ().
         """
         if max_workers == 0:
             super().__init__(1, thread_name_prefix, initializer, initargs)
             self._max_workers = 0
         else:
             super().__init__(max_workers, thread_name_prefix, initializer, initargs)
-
+
+
 # For backward-compatibility
 ProcessPoolExecutor = AsyncProcessPoolExecutor
 ThreadPoolExecutor = AsyncThreadPoolExecutor
 
 # Pruning thread pool
 
-
+
+def _worker(
+    executor_reference, work_queue, initializer, initargs, timeout
+):  # NOTE: NEW 'timeout'
     """
     Worker function for the PruningThreadPoolExecutor.
 
@@ -216,22 +245,21 @@ def _worker(executor_reference, work_queue, initializer, initargs, timeout): #
         try:
             initializer(*initargs)
         except BaseException:
-            _base.LOGGER.critical(
+            _base.LOGGER.critical("Exception in initializer:", exc_info=True)
             executor = executor_reference()
             if executor is not None:
                 executor._initializer_failed()
             return
-
+
     try:
         while True:
             try:  # NOTE: NEW
-                work_item = work_queue.get(block=True,
-                                           timeout=timeout)  # NOTE: NEW
+                work_item = work_queue.get(block=True, timeout=timeout)  # NOTE: NEW
             except queue.Empty:  # NOTE: NEW
                 # Its been 'timeout' seconds and there are no new work items.  # NOTE: NEW
                 # Let's suicide the thread.  # NOTE: NEW
                 executor = executor_reference()  # NOTE: NEW
-
+
                 with executor._adjusting_lock:  # NOTE: NEW
                     # NOTE: We keep a minimum of one thread active to prevent locks
                     if len(executor) > 1:  # NOTE: NEW
@@ -240,9 +268,9 @@ def _worker(executor_reference, work_queue, initializer, initargs, timeout): #
                         thread._threads_queues.pop(t)  # NOTE: NEW
                         # Let the executor know we have one less idle thread available
                         executor._idle_semaphore.acquire(blocking=False)  # NOTE: NEW
-                        return  # NOTE: NEW
+                        return  # NOTE: NEW
                 continue
-
+
             if work_item is not None:
                 work_item.run()
                 # Delete references to object. See issue16284
@@ -269,34 +297,48 @@ def _worker(executor_reference, work_queue, initializer, initargs, timeout): #
                 return
             del executor
     except BaseException:
-        _base.LOGGER.critical(
+        _base.LOGGER.critical("Exception in worker", exc_info=True)
+
 
 class PruningThreadPoolExecutor(AsyncThreadPoolExecutor):
     """
     This `AsyncThreadPoolExecutor` implementation prunes inactive threads after 'timeout' seconds without a work item.
     Pruned threads will be automatically recreated as needed for future workloads. Up to 'max_threads' can be active at any one time.
+    A minimum of one thread will remain active to prevent locks.
     """
+
     __slots__ = "_timeout", "_adjusting_lock"
 
-    def __init__(
-
+    def __init__(
+        self,
+        max_workers=None,
+        thread_name_prefix="",
+        initializer=None,
+        initargs=(),
+        timeout=TEN_MINUTES,
+    ):
         """
         Initializes the PruningThreadPoolExecutor.
 
         Args:
-            max_workers
-            thread_name_prefix
-            initializer
-            initargs
-            timeout
+            max_workers: The maximum number of workers. Defaults to None.
+            thread_name_prefix: Prefix for thread names. Defaults to ''.
+            initializer: An initializer callable. Defaults to None.
+            initargs: Arguments for the initializer. Defaults to ().
+            timeout: Timeout duration for pruning inactive threads. Defaults to TEN_MINUTES.
         """
-
+
+        self._timeout = timeout
+        """Timeout duration for pruning inactive threads."""
+
         self._adjusting_lock = threading.Lock()
+        """Lock used to adjust the number of threads."""
+
         super().__init__(max_workers, thread_name_prefix, initializer, initargs)
-
+
     def __len__(self) -> int:
         return len(self._threads)
-
+
     def _adjust_thread_count(self):
         """
         Adjusts the number of threads based on workload and idle threads.
@@ -313,19 +355,24 @@ class PruningThreadPoolExecutor(AsyncThreadPoolExecutor):
 
         num_threads = len(self._threads)
        if num_threads < self._max_workers:
-            thread_name =
-
-
-
-
-
-
-
+            thread_name = "%s_%d" % (self._thread_name_prefix or self, num_threads)
+            t = threading.Thread(
+                name=thread_name,
+                target=_worker,
+                args=(
+                    weakref.ref(self, weakref_cb),
+                    self._work_queue,
+                    self._initializer,
+                    self._initargs,
+                    self._timeout,
+                ),
+            )
            t.daemon = True
            t.start()
            self._threads.add(t)
            thread._threads_queues[t] = self._work_queue
 
+
 executor = PruningThreadPoolExecutor(128)
 
 __all__ = [
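To make the executor changes above concrete, here is a small usage sketch. The class names, the run()/submit() behaviour, and the kwargs support are taken from the docstrings in the diff; the import path a_sync.executor is inferred from the file name, and the work() helper is purely illustrative.

import asyncio
from a_sync.executor import AsyncThreadPoolExecutor, PruningThreadPoolExecutor

def work(x, exponent=2):
    # stand-in for blocking work you want kept off the event loop
    return x ** exponent

async def main():
    executor = AsyncThreadPoolExecutor(8)

    # run() is shorthand for run_in_executor() and, per the docstring above, accepts kwargs
    result = await executor.run(work, 3, exponent=4)

    # submit() returns an asyncio.Future rather than a concurrent.futures.Future,
    # so it can be awaited directly
    fut = executor.submit(work, 5)
    print(result, await fut)

    # The pruning variant behaves the same, but tears down idle threads after
    # the timeout (TEN_MINUTES by default) while keeping at least one alive
    pruning = PruningThreadPoolExecutor(128)
    print(await pruning.run(work, 2))

asyncio.run(main())

Passing max_workers=0 to either constructor puts the executor into sync_mode, in which run() simply calls the function inline (see the return expression in the mixin's run() above).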