ez-a-sync 0.22.13__py3-none-any.whl → 0.22.15__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of ez-a-sync might be problematic.
- a_sync/ENVIRONMENT_VARIABLES.py +4 -3
- a_sync/__init__.py +30 -12
- a_sync/_smart.py +132 -28
- a_sync/_typing.py +56 -12
- a_sync/a_sync/__init__.py +35 -10
- a_sync/a_sync/_descriptor.py +74 -26
- a_sync/a_sync/_flags.py +14 -6
- a_sync/a_sync/_helpers.py +8 -7
- a_sync/a_sync/_kwargs.py +3 -2
- a_sync/a_sync/_meta.py +120 -28
- a_sync/a_sync/abstract.py +102 -28
- a_sync/a_sync/base.py +34 -16
- a_sync/a_sync/config.py +47 -13
- a_sync/a_sync/decorator.py +239 -117
- a_sync/a_sync/function.py +416 -146
- a_sync/a_sync/method.py +197 -59
- a_sync/a_sync/modifiers/__init__.py +47 -5
- a_sync/a_sync/modifiers/cache/__init__.py +46 -17
- a_sync/a_sync/modifiers/cache/memory.py +86 -20
- a_sync/a_sync/modifiers/limiter.py +52 -22
- a_sync/a_sync/modifiers/manager.py +98 -16
- a_sync/a_sync/modifiers/semaphores.py +48 -15
- a_sync/a_sync/property.py +383 -82
- a_sync/a_sync/singleton.py +1 -0
- a_sync/aliases.py +0 -1
- a_sync/asyncio/__init__.py +4 -1
- a_sync/asyncio/as_completed.py +177 -49
- a_sync/asyncio/create_task.py +31 -17
- a_sync/asyncio/gather.py +72 -52
- a_sync/asyncio/utils.py +3 -3
- a_sync/exceptions.py +78 -23
- a_sync/executor.py +120 -71
- a_sync/future.py +575 -158
- a_sync/iter.py +110 -50
- a_sync/primitives/__init__.py +14 -2
- a_sync/primitives/_debug.py +13 -13
- a_sync/primitives/_loggable.py +5 -4
- a_sync/primitives/locks/__init__.py +5 -2
- a_sync/primitives/locks/counter.py +38 -36
- a_sync/primitives/locks/event.py +21 -7
- a_sync/primitives/locks/prio_semaphore.py +182 -62
- a_sync/primitives/locks/semaphore.py +78 -77
- a_sync/primitives/queue.py +560 -58
- a_sync/sphinx/__init__.py +0 -1
- a_sync/sphinx/ext.py +160 -50
- a_sync/task.py +262 -97
- a_sync/utils/__init__.py +12 -6
- a_sync/utils/iterators.py +127 -43
- {ez_a_sync-0.22.13.dist-info → ez_a_sync-0.22.15.dist-info}/METADATA +1 -1
- ez_a_sync-0.22.15.dist-info/RECORD +74 -0
- {ez_a_sync-0.22.13.dist-info → ez_a_sync-0.22.15.dist-info}/WHEEL +1 -1
- tests/conftest.py +1 -2
- tests/executor.py +112 -9
- tests/fixtures.py +61 -32
- tests/test_abstract.py +7 -4
- tests/test_as_completed.py +54 -21
- tests/test_base.py +66 -17
- tests/test_cache.py +31 -15
- tests/test_decorator.py +54 -28
- tests/test_executor.py +8 -13
- tests/test_future.py +45 -8
- tests/test_gather.py +8 -2
- tests/test_helpers.py +2 -0
- tests/test_iter.py +55 -13
- tests/test_limiter.py +5 -3
- tests/test_meta.py +23 -9
- tests/test_modified.py +4 -1
- tests/test_semaphore.py +15 -8
- tests/test_singleton.py +15 -10
- tests/test_task.py +126 -28
- ez_a_sync-0.22.13.dist-info/RECORD +0 -74
- {ez_a_sync-0.22.13.dist-info → ez_a_sync-0.22.15.dist-info}/LICENSE.txt +0 -0
- {ez_a_sync-0.22.13.dist-info → ez_a_sync-0.22.15.dist-info}/top_level.txt +0 -0
a_sync/exceptions.py
CHANGED
@@ -15,27 +15,40 @@ class ASyncFlagException(ValueError):
     """
     Base exception class for flag-related errors in the a_sync library.
     """
-
-
-
-
-
-
+
+    viable_flags = VIABLE_FLAGS
+    """
+    The set of viable flags.
+
+    A-Sync uses 'flags' to indicate whether objects / fn calls will be sync or async.
+    You can use any of the provided flags, whichever makes most sense for your use case.
+    """
 
     def desc(self, target) -> str:
-
+        """
+        Returns a description of the target for the flag error message.
+
+        Args:
+            target: The target object or string to describe.
+
+        Returns:
+            A string description of the target.
+        """
+        if target == "kwargs":
             return "flags present in 'kwargs'"
         else:
-            return f
+            return f"flag attributes defined on {target}"
+
 
 class NoFlagsFound(ASyncFlagException):
     """
     Raised when no viable flags are found in the target.
     """
+
     def __init__(self, target, kwargs_keys=None):
         """
         Initializes the NoFlagsFound exception.
-
+
         Args:
             target: The target object where flags were expected.
             kwargs_keys: Optional; keys in the kwargs if applicable.
@@ -47,14 +60,16 @@ class NoFlagsFound(ASyncFlagException):
         err += "\nThis is likely an issue with a custom subclass definition."
         super().__init__(err)
 
+
 class TooManyFlags(ASyncFlagException):
     """
     Raised when multiple flags are found, but only one was expected.
     """
+
     def __init__(self, target, present_flags):
         """
         Initializes the TooManyFlags exception.
-
+
         Args:
             target: The target object where flags were found.
             present_flags: The flags that were found.
@@ -64,14 +79,16 @@ class TooManyFlags(ASyncFlagException):
         err += "This is likely an issue with a custom subclass definition."
         super().__init__(err)
 
+
 class InvalidFlag(ASyncFlagException):
     """
     Raised when an invalid flag is encountered.
     """
+
     def __init__(self, flag: Optional[str]):
         """
         Initializes the InvalidFlag exception.
-
+
         Args:
             flag: The invalid flag.
         """
@@ -79,28 +96,32 @@ class InvalidFlag(ASyncFlagException):
         err += "\nThis code should not be reached and likely indicates an issue with a custom subclass definition."
         super().__init__(err)
 
+
 class InvalidFlagValue(ASyncFlagException):
     """
     Raised when a flag has an invalid value.
     """
+
     def __init__(self, flag: str, flag_value: Any):
         """
         Initializes the InvalidFlagValue exception.
-
+
         Args:
             flag: The flag with an invalid value.
            flag_value: The invalid value of the flag.
         """
         super().__init__(f"'{flag}' should be boolean. You passed {flag_value}.")
 
+
 class FlagNotDefined(ASyncFlagException):
     """
     Raised when a flag is not defined on an object.
     """
+
     def __init__(self, obj: Type, flag: str):
         """
         Initializes the FlagNotDefined exception.
-
+
         Args:
             obj: The object where the flag is not defined.
             flag: The undefined flag.
@@ -113,47 +134,62 @@ class ImproperFunctionType(ValueError):
     Raised when a function that should be sync is async or vice-versa.
     """
 
+
 class FunctionNotAsync(ImproperFunctionType):
     """
     Raised when a function expected to be async is not.
     """
+
     def __init__(self, fn):
         """
         Initializes the FunctionNotAsync exception.
-
+
         Args:
             fn: The function that is not async.
         """
-        super().__init__(
+        super().__init__(
+            f"`coro_fn` must be a coroutine function defined with `async def`. You passed {fn}."
+        )
+
 
 class FunctionNotSync(ImproperFunctionType):
     """
     Raised when a function expected to be sync is not.
     """
+
     def __init__(self, fn):
         """
         Initializes the FunctionNotSync exception.
-
+
         Args:
             fn: The function that is not sync.
         """
-        super().__init__(
-
+        super().__init__(
+            f"`func` must be a coroutine function defined with `def`. You passed {fn}."
+        )
+
+
 class ASyncRuntimeError(RuntimeError):
+    """
+    Raised for runtime errors in asynchronous operations.
+    """
+
     def __init__(self, e: RuntimeError):
         """
         Initializes the ASyncRuntimeError exception.
-
+
         Args:
             e: The original runtime error.
         """
         super().__init__(str(e))
 
+
 class SyncModeInAsyncContextError(ASyncRuntimeError):
     """
     Raised when synchronous code is used within an asynchronous context.
     """
-
+
+    def __init__(self, err: str = ""):
         """
         Initializes the SyncModeInAsyncContextError exception.
         """
@@ -163,16 +199,18 @@ class SyncModeInAsyncContextError(ASyncRuntimeError):
         err += f"{VIABLE_FLAGS}"
         super().__init__(err)
 
+
 class MappingError(Exception):
     """
     Base class for errors related to :class:`~TaskMapping`.
     """
+
     _msg: str
 
-    def __init__(self, mapping: "TaskMapping", msg: str =
+    def __init__(self, mapping: "TaskMapping", msg: str = ""):
         """
         Initializes the MappingError exception.
-
+
         Args:
             mapping: The TaskMapping where the error occurred.
             msg: An optional message describing the error.
@@ -183,25 +221,42 @@ class MappingError(Exception):
         super().__init__(msg)
         self.mapping = mapping
 
+
 class MappingIsEmptyError(MappingError):
     """
     Raised when a TaskMapping is empty and an operation requires it to have items.
     """
+
     _msg = "TaskMapping does not contain anything to yield"
 
+
 class MappingNotEmptyError(MappingError):
     """
     Raised when a TaskMapping is not empty and an operation requires it to be empty.
     """
+
     _msg = "TaskMapping already contains some data. In order to use `map`, you need a fresh one"
 
+
 class PersistedTaskException(Exception):
+    """
+    Raised when an exception persists in an asyncio Task.
+    """
+
     def __init__(self, exc: E, task: asyncio.Task) -> None:
+        """
+        Initializes the PersistedTaskException exception.
+
+        Args:
+            exc: The exception that persisted.
+            task: The asyncio Task where the exception occurred.
+        """
         super().__init__(f"{exc.__class__.__name__}: {exc}", task)
         self.exception = exc
         self.task = task
 
+
 class EmptySequenceError(ValueError):
     """
-    Raised when an operation is attempted on an empty sequence but items are
+    Raised when an operation is attempted on an empty sequence but items are required.
     """
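The docstrings added above describe how the flag exceptions surface in practice. A minimal sketch, based on the constructor signatures and the a_sync/exceptions.py module path shown in this diff (the check_flag helper and the call site are hypothetical):

    from a_sync.exceptions import ASyncFlagException, InvalidFlagValue

    def check_flag(flag: str, value) -> bool:
        # A-Sync flags are expected to be booleans; anything else raises InvalidFlagValue.
        if not isinstance(value, bool):
            raise InvalidFlagValue(flag, value)
        return value

    try:
        check_flag("sync", "yes")
    except ASyncFlagException as exc:  # the base class catches all flag-related errors
        print(exc)  # 'sync' should be boolean. You passed yes.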
a_sync/executor.py
CHANGED
@@ -1,13 +1,13 @@
 """
 With these executors, you can simply run sync functions in your executor with `await executor.run(fn, *args)`.
 
-`executor.submit(fn, *args)` will work the same as the concurrent.futures implementation, but will return an asyncio.Future instead of a concurrent.futures.Future
+`executor.submit(fn, *args)` will work the same as the concurrent.futures implementation, but will return an asyncio.Future instead of a concurrent.futures.Future.
 
 This module provides several executor classes:
-- _AsyncExecutorMixin: A mixin providing asynchronous run and submit methods.
+- _AsyncExecutorMixin: A mixin providing asynchronous run and submit methods, with support for synchronous mode.
 - AsyncProcessPoolExecutor: An async process pool executor.
 - AsyncThreadPoolExecutor: An async thread pool executor.
-- PruningThreadPoolExecutor: A thread pool executor that prunes inactive threads after a timeout.
+- PruningThreadPoolExecutor: A thread pool executor that prunes inactive threads after a timeout, ensuring at least one thread remains active.
 """
 
 import asyncio
@@ -27,42 +27,45 @@ TEN_MINUTES = 60 * 10
 
 Initializer = Callable[..., object]
 
+
 class _AsyncExecutorMixin(cf.Executor, _DebugDaemonMixin):
     """
     A mixin for Executors to provide asynchronous run and submit methods.
     """
+
     _max_workers: int
+
     _workers: str
+    """The type of workers used."""
+
     __slots__ = "_max_workers", "_initializer", "_initargs", "_broken", "_shutdown_lock"
 
-    async def run(self, fn: Callable[P, T], *args: P.args, **kwargs: P.kwargs)
+    async def run(self, fn: Callable[P, T], *args: P.args, **kwargs: P.kwargs):
         """
-        A shorthand way to call `await asyncio.get_event_loop().run_in_executor(this_executor, fn, *args)
+        A shorthand way to call `await asyncio.get_event_loop().run_in_executor(this_executor, fn, *args)`.
         Doesn't `await this_executor.run(fn, *args)` look so much better?
-
+
         Oh, and you can also use kwargs!
 
         Args:
-            fn
+            fn: The function to run.
             *args: Positional arguments for the function.
             **kwargs: Keyword arguments for the function.
-
-        Returns:
-            T: The result of the function.
         """
-        return
+        return (
+            fn(*args, **kwargs)
+            if self.sync_mode
+            else await self.submit(fn, *args, **kwargs)
+        )
 
     def submit(self, fn: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> "asyncio.Future[T]": # type: ignore [override]
         """
         Submits a job to the executor and returns an asyncio.Future that can be awaited for the result without blocking.
 
         Args:
-            fn
+            fn: The function to submit.
            *args: Positional arguments for the function.
            **kwargs: Keyword arguments for the function.
-
-        Returns:
-            asyncio.Future[T]: The future representing the result of the function.
         """
         if self.sync_mode:
             fut = asyncio.get_event_loop().create_future()
@@ -86,9 +89,6 @@ class _AsyncExecutorMixin(cf.Executor, _DebugDaemonMixin):
     def sync_mode(self) -> bool:
         """
         Indicates if the executor is in synchronous mode (max_workers == 0).
-
-        Returns:
-            bool: True if in synchronous mode, False otherwise.
         """
         return self._max_workers == 0
 
@@ -96,9 +96,6 @@ class _AsyncExecutorMixin(cf.Executor, _DebugDaemonMixin):
     def worker_count_current(self) -> int:
         """
         Returns the current number of workers.
-
-        Returns:
-            int: The current number of workers.
         """
         return len(getattr(self, f"_{self._workers}"))
 
@@ -107,43 +104,62 @@ class _AsyncExecutorMixin(cf.Executor, _DebugDaemonMixin):
         Runs until manually cancelled by the finished work item.
 
         Args:
-            fut
+            fut: The future being debugged.
            fn: The function being executed.
            *args: Positional arguments for the function.
            **kwargs: Keyword arguments for the function.
         """
         # TODO: make prettier strings for other types
         if type(fn).__name__ == "function":
-            fnid = getattr(fn,
+            fnid = getattr(fn, "__qualname__", fn.__name__)
             if fn.__module__:
                 fnid = f"{fn.__module__}.{fnid}"
+        else:
+            fnid = fn
 
         msg = f"%s processing %s{args}"
         if kwargs:
             msg = f"{msg[:-1]} {', '.join(f'{k}={v}' for k, v in kwargs.items())})"
         else:
             msg = f"{msg[:-2]})"
-
+
         while not fut.done():
             await asyncio.sleep(15)
             if not fut.done():
                 self.logger.debug(msg, self, fnid)
-
+
+
 # Process
 
+
 class AsyncProcessPoolExecutor(_AsyncExecutorMixin, cf.ProcessPoolExecutor):
     """
     An async process pool executor that allows use of kwargs.
+
+    Attributes:
+        _workers:
     """
+
     _workers = "processes"
-
-
-
+    """The type of workers used, set to "processes"."""
+
+    __slots__ = (
+        "_mp_context",
+        "_processes",
+        "_pending_work_items",
+        "_call_queue",
+        "_result_queue",
+        "_queue_management_thread",
+        "_queue_count",
+        "_shutdown_thread",
+        "_work_ids",
+        "_queue_management_thread_wakeup",
+    )
 
     def __init__(
-        self,
-        max_workers: Optional[int] = None,
-        mp_context: Optional[multiprocessing.context.BaseContext] = None,
+        self,
+        max_workers: Optional[int] = None,
+        mp_context: Optional[multiprocessing.context.BaseContext] = None,
         initializer: Optional[Initializer] = None,
         initargs: Tuple[Any, ...] = (),
     ) -> None:
@@ -151,10 +167,10 @@ class AsyncProcessPoolExecutor(_AsyncExecutorMixin, cf.ProcessPoolExecutor):
         Initializes the AsyncProcessPoolExecutor.
 
         Args:
-            max_workers
-            mp_context
-            initializer
-            initargs
+            max_workers: The maximum number of workers. Defaults to None.
+            mp_context: The multiprocessing context. Defaults to None.
+            initializer: An initializer callable. Defaults to None.
+            initargs: Arguments for the initializer. Defaults to ().
         """
         if max_workers == 0:
             super().__init__(1, mp_context, initializer, initargs)
@@ -162,19 +178,30 @@ class AsyncProcessPoolExecutor(_AsyncExecutorMixin, cf.ProcessPoolExecutor):
         else:
             super().__init__(max_workers, mp_context, initializer, initargs)
 
+
 # Thread
 
+
 class AsyncThreadPoolExecutor(_AsyncExecutorMixin, cf.ThreadPoolExecutor):
     """
     An async thread pool executor that allows use of kwargs.
     """
+
     _workers = "threads"
-
+    """The type of workers used, set to "threads"."""
+
+    __slots__ = (
+        "_work_queue",
+        "_idle_semaphore",
+        "_threads",
+        "_shutdown",
+        "_thread_name_prefix",
+    )
 
     def __init__(
-        self,
-        max_workers: Optional[int] = None,
-        thread_name_prefix: str =
+        self,
+        max_workers: Optional[int] = None,
+        thread_name_prefix: str = "",
         initializer: Optional[Initializer] = None,
         initargs: Tuple[Any, ...] = (),
     ) -> None:
@@ -182,24 +209,28 @@ class AsyncThreadPoolExecutor(_AsyncExecutorMixin, cf.ThreadPoolExecutor):
         Initializes the AsyncThreadPoolExecutor.
 
         Args:
-            max_workers
-            thread_name_prefix
-            initializer
-            initargs
+            max_workers: The maximum number of workers. Defaults to None.
+            thread_name_prefix: Prefix for thread names. Defaults to ''.
+            initializer: An initializer callable. Defaults to None.
+            initargs: Arguments for the initializer. Defaults to ().
         """
         if max_workers == 0:
             super().__init__(1, thread_name_prefix, initializer, initargs)
             self._max_workers = 0
         else:
             super().__init__(max_workers, thread_name_prefix, initializer, initargs)
-
+
+
 # For backward-compatibility
 ProcessPoolExecutor = AsyncProcessPoolExecutor
 ThreadPoolExecutor = AsyncThreadPoolExecutor
 
 # Pruning thread pool
 
-
+
+def _worker(
+    executor_reference, work_queue, initializer, initargs, timeout
+):  # NOTE: NEW 'timeout'
     """
     Worker function for the PruningThreadPoolExecutor.
 
@@ -214,22 +245,21 @@ def _worker(executor_reference, work_queue, initializer, initargs, timeout): #
        try:
            initializer(*initargs)
        except BaseException:
-            _base.LOGGER.critical(
+            _base.LOGGER.critical("Exception in initializer:", exc_info=True)
            executor = executor_reference()
            if executor is not None:
                executor._initializer_failed()
            return
-
+
    try:
        while True:
            try: # NOTE: NEW
-                work_item = work_queue.get(block=True,
-                                           timeout=timeout) # NOTE: NEW
+                work_item = work_queue.get(block=True, timeout=timeout) # NOTE: NEW
            except queue.Empty: # NOTE: NEW
                # Its been 'timeout' seconds and there are no new work items. # NOTE: NEW
                # Let's suicide the thread. # NOTE: NEW
                executor = executor_reference() # NOTE: NEW
-
+
                with executor._adjusting_lock: # NOTE: NEW
                    # NOTE: We keep a minimum of one thread active to prevent locks
                    if len(executor) > 1: # NOTE: NEW
@@ -238,9 +268,9 @@ def _worker(executor_reference, work_queue, initializer, initargs, timeout): #
                        thread._threads_queues.pop(t) # NOTE: NEW
                        # Let the executor know we have one less idle thread available
                        executor._idle_semaphore.acquire(blocking=False) # NOTE: NEW
-                        return # NOTE: NEW
+                        return  # NOTE: NEW
                continue
-
+
            if work_item is not None:
                work_item.run()
                # Delete references to object. See issue16284
@@ -267,34 +297,48 @@ def _worker(executor_reference, work_queue, initializer, initargs, timeout): #
                return
            del executor
    except BaseException:
-        _base.LOGGER.critical(
+        _base.LOGGER.critical("Exception in worker", exc_info=True)
+
 
 class PruningThreadPoolExecutor(AsyncThreadPoolExecutor):
     """
     This `AsyncThreadPoolExecutor` implementation prunes inactive threads after 'timeout' seconds without a work item.
     Pruned threads will be automatically recreated as needed for future workloads. Up to 'max_threads' can be active at any one time.
+    A minimum of one thread will remain active to prevent locks.
     """
+
     __slots__ = "_timeout", "_adjusting_lock"
 
-    def __init__(
-
+    def __init__(
+        self,
+        max_workers=None,
+        thread_name_prefix="",
+        initializer=None,
+        initargs=(),
+        timeout=TEN_MINUTES,
+    ):
         """
         Initializes the PruningThreadPoolExecutor.
 
         Args:
-            max_workers
-            thread_name_prefix
-            initializer
-            initargs
-            timeout
+            max_workers: The maximum number of workers. Defaults to None.
+            thread_name_prefix: Prefix for thread names. Defaults to ''.
+            initializer: An initializer callable. Defaults to None.
+            initargs: Arguments for the initializer. Defaults to ().
+            timeout: Timeout duration for pruning inactive threads. Defaults to TEN_MINUTES.
         """
-
+
+        self._timeout = timeout
+        """Timeout duration for pruning inactive threads."""
+
         self._adjusting_lock = threading.Lock()
+        """Lock used to adjust the number of threads."""
+
         super().__init__(max_workers, thread_name_prefix, initializer, initargs)
-
+
     def __len__(self) -> int:
         return len(self._threads)
-
+
     def _adjust_thread_count(self):
         """
         Adjusts the number of threads based on workload and idle threads.
@@ -311,19 +355,24 @@ class PruningThreadPoolExecutor(AsyncThreadPoolExecutor):
 
         num_threads = len(self._threads)
         if num_threads < self._max_workers:
-            thread_name =
-
-
-
-
-
-
-
+            thread_name = "%s_%d" % (self._thread_name_prefix or self, num_threads)
+            t = threading.Thread(
+                name=thread_name,
+                target=_worker,
+                args=(
+                    weakref.ref(self, weakref_cb),
+                    self._work_queue,
+                    self._initializer,
+                    self._initargs,
+                    self._timeout,
+                ),
+            )
            t.daemon = True
            t.start()
            self._threads.add(t)
            thread._threads_queues[t] = self._work_queue
 
+
 executor = PruningThreadPoolExecutor(128)
 
 __all__ = [
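Taken together, the executor changes keep the usage pattern described in the module docstring: `await executor.run(fn, *args, **kwargs)` awaits the result directly, while `executor.submit(fn, *args, **kwargs)` hands back an asyncio.Future. A minimal sketch, assuming the wheel is installed and the a_sync/executor.py module path shown in this diff; blocking_work is a hypothetical stand-in for real synchronous work:

    import asyncio
    import time

    from a_sync.executor import AsyncThreadPoolExecutor, PruningThreadPoolExecutor

    def blocking_work(x: int, delay: float = 0.05) -> int:
        time.sleep(delay)  # stands in for CPU- or IO-bound synchronous work
        return x * 2

    async def main() -> None:
        executor = AsyncThreadPoolExecutor(4)
        # `run` awaits the result directly, and kwargs are supported.
        print(await executor.run(blocking_work, 21, delay=0.01))
        # `submit` returns an asyncio.Future that can be awaited later.
        fut = executor.submit(blocking_work, 5)
        print(await fut)

        # The pruning variant sheds threads that sit idle for `timeout` seconds,
        # always keeping at least one thread alive.
        pruning = PruningThreadPoolExecutor(8, timeout=60)
        print(await pruning.run(blocking_work, 3))

    asyncio.run(main())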