tinybird 0.0.1.dev0__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.
Potentially problematic release: this version of tinybird might be problematic.
- tinybird/__cli__.py +8 -0
- tinybird/ch_utils/constants.py +244 -0
- tinybird/ch_utils/engine.py +855 -0
- tinybird/check_pypi.py +25 -0
- tinybird/client.py +1281 -0
- tinybird/config.py +117 -0
- tinybird/connectors.py +428 -0
- tinybird/context.py +23 -0
- tinybird/datafile.py +5589 -0
- tinybird/datatypes.py +434 -0
- tinybird/feedback_manager.py +1022 -0
- tinybird/git_settings.py +145 -0
- tinybird/sql.py +865 -0
- tinybird/sql_template.py +2343 -0
- tinybird/sql_template_fmt.py +281 -0
- tinybird/sql_toolset.py +350 -0
- tinybird/syncasync.py +682 -0
- tinybird/tb_cli.py +25 -0
- tinybird/tb_cli_modules/auth.py +252 -0
- tinybird/tb_cli_modules/branch.py +1043 -0
- tinybird/tb_cli_modules/cicd.py +434 -0
- tinybird/tb_cli_modules/cli.py +1571 -0
- tinybird/tb_cli_modules/common.py +2082 -0
- tinybird/tb_cli_modules/config.py +344 -0
- tinybird/tb_cli_modules/connection.py +803 -0
- tinybird/tb_cli_modules/datasource.py +900 -0
- tinybird/tb_cli_modules/exceptions.py +91 -0
- tinybird/tb_cli_modules/fmt.py +91 -0
- tinybird/tb_cli_modules/job.py +85 -0
- tinybird/tb_cli_modules/pipe.py +858 -0
- tinybird/tb_cli_modules/regions.py +9 -0
- tinybird/tb_cli_modules/tag.py +100 -0
- tinybird/tb_cli_modules/telemetry.py +310 -0
- tinybird/tb_cli_modules/test.py +107 -0
- tinybird/tb_cli_modules/tinyunit/tinyunit.py +340 -0
- tinybird/tb_cli_modules/tinyunit/tinyunit_lib.py +71 -0
- tinybird/tb_cli_modules/token.py +349 -0
- tinybird/tb_cli_modules/workspace.py +269 -0
- tinybird/tb_cli_modules/workspace_members.py +212 -0
- tinybird/tornado_template.py +1194 -0
- tinybird-0.0.1.dev0.dist-info/METADATA +2815 -0
- tinybird-0.0.1.dev0.dist-info/RECORD +45 -0
- tinybird-0.0.1.dev0.dist-info/WHEEL +5 -0
- tinybird-0.0.1.dev0.dist-info/entry_points.txt +2 -0
- tinybird-0.0.1.dev0.dist-info/top_level.txt +4 -0
tinybird/syncasync.py
ADDED
@@ -0,0 +1,682 @@
# Copyright (c) Django Software Foundation and individual contributors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of Django nor the names of its contributors may be used
# to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import asyncio.coroutines
import functools
import inspect
import os
import queue
import random
import string
import sys
import threading
import warnings
import weakref
from concurrent.futures import Executor, Future, ThreadPoolExecutor
from typing import Any, Callable, Dict, Optional

if sys.version_info >= (3, 7):
    import contextvars
else:
    contextvars = None

class Local:
    """
    A drop-in replacement for threading.locals that also works with asyncio
    Tasks (via the current_task asyncio method), and passes locals through
    sync_to_async and async_to_sync.
    Specifically:
     - Locals work per-coroutine on any thread not spawned using asgiref
     - Locals work per-thread on any thread not spawned using asgiref
     - Locals are shared with the parent coroutine when using sync_to_async
     - Locals are shared with the parent thread when using async_to_sync
       (and if that thread was launched using sync_to_async, with its parent
       coroutine as well, with this working for indefinite levels of nesting)
    Set thread_critical to True to not allow locals to pass from an async Task
    to a thread it spawns. This is needed for code that truly needs
    thread-safety, as opposed to things used for helpful context (e.g. sqlite
    does not like being called from a different thread to the one it is from).
    Thread-critical code will still be differentiated per-Task within a thread
    as it is expected it does not like concurrent access.
    This doesn't use contextvars as it needs to support 3.6. Once it can support
    3.7 only, we can then reimplement the storage more nicely.
    """

    CLEANUP_INTERVAL = 60  # seconds

    def __init__(self, thread_critical: bool = False) -> None:
        self._thread_critical = thread_critical
        self._thread_lock = threading.RLock()
        self._context_refs: "weakref.WeakSet[object]" = weakref.WeakSet()
        # Random suffixes stop accidental reuse between different Locals,
        # though we try to force deletion as well.
        self._attr_name = "_asgiref_local_impl_{}_{}".format(
            id(self),
            "".join(random.choice(string.ascii_letters) for i in range(8)),
        )

    def _get_context_id(self):
        """
        Get the ID we should use for looking up variables
        """
        # Prevent a circular reference
        # from .sync import AsyncToSync, SyncToAsync

        # First, pull the current task if we can
        context_id = SyncToAsync.get_current_task()
        context_is_async = True
        # OK, let's try for a thread ID
        if context_id is None:
            context_id = threading.current_thread()
            context_is_async = False
        # If we're thread-critical, we stop here, as we can't share contexts.
        if self._thread_critical:
            return context_id
        # Now, take those and see if we can resolve them through the launch maps
        for i in range(sys.getrecursionlimit()):  # noqa: B007
            try:
                if context_is_async:
                    # Tasks have a source thread in AsyncToSync
                    context_id = AsyncToSync.launch_map[context_id]
                    context_is_async = False
                else:
                    # Threads have a source task in SyncToAsync
                    context_id = SyncToAsync.launch_map[context_id]
                    context_is_async = True
            except KeyError:
                break
        else:
            # Catch infinite loops (they happen if you are screwing around
            # with AsyncToSync implementations)
            raise RuntimeError("Infinite launch_map loops")
        return context_id

    def _get_storage(self):
        context_obj = self._get_context_id()
        if not hasattr(context_obj, self._attr_name):
            setattr(context_obj, self._attr_name, {})
            self._context_refs.add(context_obj)
        return getattr(context_obj, self._attr_name)

    def __del__(self):
        try:
            for context_obj in self._context_refs:
                try:
                    delattr(context_obj, self._attr_name)
                except AttributeError:
                    pass
        except TypeError:
            # WeakSet.__iter__ can crash when interpreter is shutting down due
            # to _IterationGuard being None.
            pass

    def __getattr__(self, key):
        with self._thread_lock:
            storage = self._get_storage()
            if key in storage:
                return storage[key]
            else:
                raise AttributeError(f"{self!r} object has no attribute {key!r}")

    def __setattr__(self, key, value):
        if key in ("_context_refs", "_thread_critical", "_thread_lock", "_attr_name"):
            return super().__setattr__(key, value)
        with self._thread_lock:
            storage = self._get_storage()
            storage[key] = value

    def __delattr__(self, key):
        with self._thread_lock:
            storage = self._get_storage()
            if key in storage:
                del storage[key]
            else:
                raise AttributeError(f"{self!r} object has no attribute {key!r}")
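Aside (editor's illustration, not part of syncasync.py): a minimal sketch of how the Local class above behaves like threading.local while following sync_to_async / async_to_sync hops. It assumes the wheel is installed so tinybird.syncasync is importable; the request_state name and values are made up.

# Illustrative sketch only -- not part of the vendored module.
from tinybird.syncasync import Local, async_to_sync, sync_to_async

request_state = Local()  # hypothetical shared "local" for request metadata

def read_user():
    # Runs via sync_to_async, yet sees the attribute set by the parent
    # coroutine, because Local resolves storage through the launch maps.
    return request_state.user

async def handler():
    request_state.user = "alice"
    return await sync_to_async(read_user)()

assert async_to_sync(handler)() == "alice"
# (end of aside; syncasync.py continues below)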


class _WorkItem:
    """
    Represents an item needing to be run in the executor.
    Copied from ThreadPoolExecutor (but it's private, so we're not going to rely on importing it)
    """

    def __init__(self, future, fn, args, kwargs):
        self.future = future
        self.fn = fn
        self.args = args
        self.kwargs = kwargs

    def run(self):
        if not self.future.set_running_or_notify_cancel():
            return
        try:
            result = self.fn(*self.args, **self.kwargs)
        except BaseException as exc:
            self.future.set_exception(exc)
            # Break a reference cycle with the exception 'exc'
            self = None
        else:
            self.future.set_result(result)


class CurrentThreadExecutor(Executor):
    """
    An Executor that actually runs code in the thread it is instantiated in.
    Passed to other threads running async code, so they can run sync code in
    the thread they came from.
    """

    def __init__(self):
        self._work_thread = threading.current_thread()
        self._work_queue = queue.Queue()
        self._broken = False

    def run_until_future(self, future):
        """
        Runs the code in the work queue until a result is available from the future.
        Should be run from the thread the executor is initialised in.
        """
        # Check we're in the right thread
        if threading.current_thread() != self._work_thread:
            raise RuntimeError("You cannot run CurrentThreadExecutor from a different thread")
        future.add_done_callback(self._work_queue.put)
        # Keep getting and running work items until we get the future we're waiting for
        # back via the future's done callback.
        try:
            while True:
                # Get a work item and run it
                work_item = self._work_queue.get()
                if work_item is future:
                    return
                work_item.run()
                del work_item
        finally:
            self._broken = True

    def submit(self, fn, *args, **kwargs):
        # Check they're not submitting from the same thread
        if threading.current_thread() == self._work_thread:
            raise RuntimeError("You cannot submit onto CurrentThreadExecutor from its own thread")
        # Check they're not too late or the executor errored
        if self._broken:
            raise RuntimeError("CurrentThreadExecutor already quit or is broken")
        # Add to work queue
        f = Future()
        work_item = _WorkItem(f, fn, args, kwargs)
        self._work_queue.put(work_item)
        # Return the future
        return f
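Aside (editor's illustration, not part of syncasync.py): a sketch of the submit()/run_until_future() handshake. Another thread schedules work back onto the executor's owning thread, which idles in run_until_future() until a sentinel future resolves. The worker/finished names are hypothetical.

# Illustrative sketch only -- not part of the vendored module.
import threading
from concurrent.futures import Future
from tinybird.syncasync import CurrentThreadExecutor

executor = CurrentThreadExecutor()  # owned by the thread creating it here
finished = Future()                 # sentinel the owning thread waits on

def worker():
    # submit() is only legal from *other* threads; the work item itself is
    # executed by the owning thread inside run_until_future().
    ran_on = executor.submit(threading.get_ident).result()
    finished.set_result(ran_on)

threading.Thread(target=worker).start()
executor.run_until_future(finished)                 # idle here, running work items
assert finished.result() == threading.get_ident()   # the work ran on this thread
# (end of aside; syncasync.py continues below)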


def _restore_context(context):
    # Check for changes in contextvars, and set them to the current
    # context for downstream consumers
    for cvar in context:
        try:
            if cvar.get() != context.get(cvar):
                cvar.set(context.get(cvar))
        except LookupError:
            cvar.set(context.get(cvar))


def _iscoroutinefunction_or_partial(func: Any) -> bool:
    # Python < 3.8 does not correctly determine partially wrapped
    # coroutine functions are coroutine functions, hence the need for
    # this to exist. Code taken from CPython.
    if sys.version_info >= (3, 8):
        return asyncio.iscoroutinefunction(func)
    else:
        while inspect.ismethod(func):
            func = func.__func__
        while isinstance(func, functools.partial):
            func = func.func

        return asyncio.iscoroutinefunction(func)
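Aside (editor's illustration, not part of syncasync.py): why the helper above exists. Before Python 3.8, asyncio.iscoroutinefunction() does not see through functools.partial, so partially applied coroutine functions would be misclassified; the fetch function below is hypothetical.

# Illustrative sketch only -- not part of the vendored module.
import asyncio
import functools
from tinybird.syncasync import _iscoroutinefunction_or_partial

async def fetch(url, timeout):
    return url, timeout

bound = functools.partial(fetch, timeout=5)
# The stdlib check is False on Python 3.7 and True on 3.8+; the helper
# unwraps methods/partials first, so it reports True on every version
# this module supports.
print(asyncio.iscoroutinefunction(bound))
print(_iscoroutinefunction_or_partial(bound))  # True
# (end of aside; syncasync.py continues below)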


class ThreadSensitiveContext:
    """Async context manager to manage context for thread sensitive mode
    This context manager controls which thread pool executor is used when in
    thread sensitive mode. By default, a single thread pool executor is shared
    within a process.
    In Python 3.7+, the ThreadSensitiveContext() context manager may be used to
    specify a thread pool per context.
    In Python 3.6, usage of this context manager has no effect.
    This context manager is re-entrant, so only the outer-most call to
    ThreadSensitiveContext will set the context.
    """

    def __init__(self):
        self.token = None

    if contextvars:

        async def __aenter__(self):
            try:
                SyncToAsync.thread_sensitive_context.get()
            except LookupError:
                self.token = SyncToAsync.thread_sensitive_context.set(self)

            return self

        async def __aexit__(self, exc, value, tb):
            if not self.token:
                return

            executor = SyncToAsync.context_to_thread_executor.pop(self, None)
            if executor:
                executor.shutdown()
            SyncToAsync.thread_sensitive_context.reset(self.token)

    else:

        async def __aenter__(self):
            return self

        async def __aexit__(self, exc, value, tb):
            pass
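Aside (editor's illustration, not part of syncasync.py): a sketch of scoping thread-sensitive sync work to its own single-thread executor with ThreadSensitiveContext (Python 3.7+; on 3.6 the context manager is a no-op). The sqlite usage and the ":memory:" path are just an example of thread-affine code.

# Illustrative sketch only -- not part of the vendored module.
import asyncio
import sqlite3
from tinybird.syncasync import ThreadSensitiveContext, sync_to_async

def query(db_path):
    # sqlite3 connections dislike changing threads; thread_sensitive=True
    # (the default) keeps every call made inside the context on one worker.
    with sqlite3.connect(db_path) as conn:
        return conn.execute("SELECT 1").fetchone()

async def handle_request(db_path):
    async with ThreadSensitiveContext():
        return await sync_to_async(query)(db_path)

print(asyncio.run(handle_request(":memory:")))  # (1,)
# (end of aside; syncasync.py continues below)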


class AsyncToSync:
    """
    Utility class which turns an awaitable that only works on the thread with
    the event loop into a synchronous callable that works in a subthread.
    If the call stack contains an async loop, the code runs there.
    Otherwise, the code runs in a new loop in a new thread.
    Either way, this thread then pauses and waits to run any thread_sensitive
    code called from further down the call stack using SyncToAsync, before
    finally exiting once the async task returns.
    """

    # Maps launched Tasks to the threads that launched them (for locals impl)
    launch_map: "Dict[asyncio.Task[object], threading.Thread]" = {}

    # Keeps track of which CurrentThreadExecutor to use. This uses an asgiref
    # Local, not a threadlocal, so that tasks can work out what their parent used.
    executors = Local()

    def __init__(self, awaitable, force_new_loop=False):
        if not callable(awaitable) or not _iscoroutinefunction_or_partial(awaitable):
            # Python does not have very reliable detection of async functions
            # (lots of false negatives) so this is just a warning.
            warnings.warn("async_to_sync was passed a non-async-marked callable", stacklevel=2)
        self.awaitable = awaitable
        try:
            self.__self__ = self.awaitable.__self__
        except AttributeError:
            pass
        if force_new_loop:
            # They have asked that we always run in a new sub-loop.
            self.main_event_loop = None
        else:
            try:
                self.main_event_loop = asyncio.get_event_loop()
            except RuntimeError:
                # There's no event loop in this thread. Look for the threadlocal if
                # we're inside SyncToAsync
                main_event_loop_pid = getattr(SyncToAsync.threadlocal, "main_event_loop_pid", None)
                # We make sure the parent loop is from the same process - if
                # they've forked, this is not going to be valid any more (#194)
                if main_event_loop_pid and main_event_loop_pid == os.getpid():
                    self.main_event_loop = getattr(SyncToAsync.threadlocal, "main_event_loop", None)
                else:
                    self.main_event_loop = None

    def __call__(self, *args, **kwargs):
        # You can't call AsyncToSync from a thread with a running event loop
        try:
            event_loop = asyncio.get_event_loop()
        except RuntimeError:
            pass
        else:
            if event_loop.is_running():
                raise RuntimeError(
                    "You cannot use AsyncToSync in the same thread as an async event loop - "
                    "just await the async function directly."
                )

        if contextvars is not None:
            # Wrapping context in list so it can be reassigned from within
            # `main_wrap`.
            context = [contextvars.copy_context()]
        else:
            context = None

        # Make a future for the return information
        call_result = Future()
        # Get the source thread
        source_thread = threading.current_thread()
        # Make a CurrentThreadExecutor we'll use to idle in this thread - we
        # need one for every sync frame, even if there's one above us in the
        # same thread.
        if hasattr(self.executors, "current"):
            old_current_executor = self.executors.current
        else:
            old_current_executor = None
        current_executor = CurrentThreadExecutor()
        self.executors.current = current_executor
        # Use call_soon_threadsafe to schedule a synchronous callback on the
        # main event loop's thread if it's there, otherwise make a new loop
        # in this thread.
        try:
            awaitable = self.main_wrap(args, kwargs, call_result, source_thread, sys.exc_info(), context)

            if not (self.main_event_loop and self.main_event_loop.is_running()):
                # Make our own event loop - in a new thread - and run inside that.
                loop = asyncio.new_event_loop()
                with ThreadPoolExecutor(max_workers=1, thread_name_prefix="AsyncToSync") as loop_executor:
                    loop_future = loop_executor.submit(self._run_event_loop, loop, awaitable)
                    if current_executor:
                        # Run the CurrentThreadExecutor until the future is done
                        current_executor.run_until_future(loop_future)
                    # Wait for future and/or allow for exception propagation
                    loop_future.result()
            else:
                # Call it inside the existing loop
                self.main_event_loop.call_soon_threadsafe(self.main_event_loop.create_task, awaitable)
                if current_executor:
                    # Run the CurrentThreadExecutor until the future is done
                    current_executor.run_until_future(call_result)
        finally:
            # Clean up any executor we were running
            if hasattr(self.executors, "current"):
                del self.executors.current
            if old_current_executor:
                self.executors.current = old_current_executor
            if contextvars is not None:
                _restore_context(context[0])

        # Wait for results from the future.
        return call_result.result()

    def _run_event_loop(self, loop, coro):
        """
        Runs the given event loop (designed to be called in a thread).
        """
        asyncio.set_event_loop(loop)
        try:
            loop.run_until_complete(coro)
        finally:
            try:
                # mimic asyncio.run() behavior
                # cancel unexhausted async generators
                if sys.version_info >= (3, 7, 0):
                    tasks = asyncio.all_tasks(loop)
                else:
                    tasks = asyncio.Task.all_tasks(loop)
                for task in tasks:
                    task.cancel()
                loop.run_until_complete(asyncio.gather(*tasks, return_exceptions=True))
                for task in tasks:
                    if task.cancelled():
                        continue
                    if task.exception() is not None:
                        loop.call_exception_handler(
                            {
                                "message": "unhandled exception during loop shutdown",
                                "exception": task.exception(),
                                "task": task,
                            }
                        )
                if hasattr(loop, "shutdown_asyncgens"):
                    loop.run_until_complete(loop.shutdown_asyncgens())
            finally:
                loop.close()
                asyncio.set_event_loop(self.main_event_loop)

    def __get__(self, parent, objtype):
        """
        Include self for methods
        """
        func = functools.partial(self.__call__, parent)
        return functools.update_wrapper(func, self.awaitable)

    async def main_wrap(self, args, kwargs, call_result, source_thread, exc_info, context):
        """
        Wraps the awaitable with something that puts the result into the
        result/exception future.
        """
        if context is not None:
            _restore_context(context[0])

        current_task = SyncToAsync.get_current_task()
        self.launch_map[current_task] = source_thread
        try:
            # If we have an exception, run the function inside the except block
            # after raising it so exc_info is correctly populated.
            if exc_info[1]:
                try:
                    raise exc_info[1]
                except BaseException:
                    result = await self.awaitable(*args, **kwargs)
            else:
                result = await self.awaitable(*args, **kwargs)
        except BaseException as e:
            call_result.set_exception(e)
        else:
            call_result.set_result(result)
        finally:
            del self.launch_map[current_task]

            if context is not None:
                context[0] = contextvars.copy_context()
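Aside (editor's illustration, not part of syncasync.py): the basic AsyncToSync path, calling a coroutine from plain synchronous code when no event loop is running in the current thread. The add coroutine is hypothetical.

# Illustrative sketch only -- not part of the vendored module.
import asyncio
from tinybird.syncasync import async_to_sync

async def add(a, b):
    await asyncio.sleep(0)  # stand-in for real awaited I/O
    return a + b

# With no loop running in this thread, AsyncToSync spins up a fresh event
# loop in a helper thread, idles here so thread_sensitive callbacks can run
# on this thread, and returns the coroutine's result synchronously.
print(async_to_sync(add)(2, 3))  # 5
# (end of aside; syncasync.py continues below)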


class SyncToAsync:
    """
    Utility class which turns a synchronous callable into an awaitable that
    runs in a threadpool. It also sets a threadlocal inside the thread so
    calls to AsyncToSync can escape it.
    If thread_sensitive is passed, the code will run in the same thread as any
    outer code. This is needed for underlying Python code that is not
    threadsafe (for example, code which handles SQLite database connections).
    If the outermost program is async (i.e. SyncToAsync is outermost), then
    this will be a dedicated single sub-thread that all sync code runs in,
    one after the other. If the outermost program is sync (i.e. AsyncToSync is
    outermost), this will just be the main thread. This is achieved by idling
    with a CurrentThreadExecutor while AsyncToSync is blocking its sync parent,
    rather than just blocking.
    If executor is passed in, that will be used instead of the loop's default executor.
    In order to pass in an executor, thread_sensitive must be set to False, otherwise
    a TypeError will be raised.
    """

    # If they've set ASGI_THREADS, update the default asyncio executor for now
    if "ASGI_THREADS" in os.environ:
        loop = asyncio.get_event_loop()
        loop.set_default_executor(
            ThreadPoolExecutor(max_workers=int(os.environ["ASGI_THREADS"]), thread_name_prefix="SyncToAsync_ASGI")
        )

    # Maps launched threads to the coroutines that spawned them
    launch_map: "Dict[threading.Thread, asyncio.Task[object]]" = {}

    # Storage for main event loop references
    threadlocal = threading.local()

    # Single-thread executor for thread-sensitive code
    single_thread_executor = ThreadPoolExecutor(max_workers=1, thread_name_prefix="SingletonAsyncToSync")

    # Maintain a contextvar for the current execution context. Optionally used
    # for thread sensitive mode.
    if sys.version_info >= (3, 7):
        thread_sensitive_context: "contextvars.ContextVar[str]" = contextvars.ContextVar("thread_sensitive_context")
    else:
        thread_sensitive_context: None = None

    # Maintaining a weak reference to the context ensures that thread pools are
    # erased once the context goes out of scope. This terminates the thread pool.
    context_to_thread_executor: "weakref.WeakKeyDictionary[object, ThreadPoolExecutor]" = weakref.WeakKeyDictionary()

    def __init__(
        self,
        func: Callable[..., Any],
        thread_sensitive: bool = True,
        executor: Optional["ThreadPoolExecutor"] = None,
    ) -> None:
        if not callable(func) or _iscoroutinefunction_or_partial(func):
            raise TypeError("sync_to_async can only be applied to sync functions.")
        self.func = func
        functools.update_wrapper(self, func)
        self._thread_sensitive = thread_sensitive
        self._is_coroutine = asyncio.coroutines._is_coroutine  # type: ignore
        if thread_sensitive and executor is not None:
            raise TypeError("executor must not be set when thread_sensitive is True")
        self._executor = executor
        try:
            self.__self__ = func.__self__  # type: ignore
        except AttributeError:
            pass

    async def __call__(self, *args, **kwargs):
        loop = asyncio.get_event_loop()

        # Work out what thread to run the code in
        if self._thread_sensitive:
            if hasattr(AsyncToSync.executors, "current"):
                # If we have a parent sync thread above somewhere, use that
                executor = AsyncToSync.executors.current
            elif self.thread_sensitive_context and self.thread_sensitive_context.get(None):
                # If we have a way of retrieving the current context, attempt
                # to use a per-context thread pool executor
                thread_sensitive_context = self.thread_sensitive_context.get()

                if thread_sensitive_context in self.context_to_thread_executor:
                    # Re-use thread executor in current context
                    executor = self.context_to_thread_executor[thread_sensitive_context]
                else:
                    # Create new thread executor in current context
                    executor = ThreadPoolExecutor(max_workers=1, thread_name_prefix="ContextAsyncToSync")
                    self.context_to_thread_executor[thread_sensitive_context] = executor
            else:
                # Otherwise, we run it in a fixed single thread
                executor = self.single_thread_executor
        else:
            # Use the passed in executor, or the loop's default if it is None
            executor = self._executor

        if contextvars is not None:
            context = contextvars.copy_context()
            child = functools.partial(self.func, *args, **kwargs)
            func = context.run
            args = (child,)
            kwargs = {}
        else:
            func = self.func

        # Run the code in the right thread
        future = loop.run_in_executor(
            executor,
            functools.partial(
                self.thread_handler,
                loop,
                self.get_current_task(),
                sys.exc_info(),
                func,
                *args,
                **kwargs,
            ),
        )
        ret = await asyncio.wait_for(future, timeout=None)

        if contextvars is not None:
            _restore_context(context)

        return ret

    def __get__(self, parent, objtype):
        """
        Include self for methods
        """
        return functools.partial(self.__call__, parent)

    def thread_handler(self, loop, source_task, exc_info, func, *args, **kwargs):
        """
        Wraps the sync application with exception handling.
        """
        # Set the threadlocal for AsyncToSync
        self.threadlocal.main_event_loop = loop
        self.threadlocal.main_event_loop_pid = os.getpid()
        # Set the task mapping (used for the locals module)
        current_thread = threading.current_thread()
        if AsyncToSync.launch_map.get(source_task) == current_thread:
            # Our parent task was launched from this same thread, so don't make
            # a launch map entry - let it shortcut over us! (and stop infinite loops)
            parent_set = False
        else:
            self.launch_map[current_thread] = source_task
            parent_set = True
        # Run the function
        try:
            # If we have an exception, run the function inside the except block
            # after raising it so exc_info is correctly populated.
            if exc_info[1]:
                try:
                    raise exc_info[1]
                except BaseException:
                    return func(*args, **kwargs)
            else:
                return func(*args, **kwargs)
        finally:
            # Only delete the launch_map parent if we set it, otherwise it is
            # from someone else.
            if parent_set:
                del self.launch_map[current_thread]

    @staticmethod
    def get_current_task():
        """
        Cross-version implementation of asyncio.current_task()
        Returns None if there is no task.
        """
        try:
            if hasattr(asyncio, "current_task"):
                # Python 3.7 and up
                return asyncio.current_task()
            else:
                # Python 3.6
                return asyncio.Task.current_task()
        except RuntimeError:
            return None


# Lowercase aliases (and decorator friendliness)
async_to_sync = AsyncToSync


def sync_to_async(
    func: Callable[..., Any],
    thread_sensitive: bool = True,
    executor: Optional["ThreadPoolExecutor"] = None,
) -> SyncToAsync:
    return SyncToAsync(
        func,
        thread_sensitive=thread_sensitive,
        executor=executor,
    )
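Aside (editor's illustration, not part of syncasync.py): a sketch of wrapping a blocking function with sync_to_async, with and without a caller-supplied executor (an executor is only accepted together with thread_sensitive=False). The blocking_io function and pool size are hypothetical.

# Illustrative sketch only -- not part of the vendored module.
import asyncio
import time
from concurrent.futures import ThreadPoolExecutor
from tinybird.syncasync import sync_to_async

def blocking_io(n):
    time.sleep(0.01)  # stand-in for a blocking call
    return n * 2

async def main():
    # Default thread_sensitive=True: runs on the shared single worker thread.
    a = await sync_to_async(blocking_io)(21)
    # Caller-supplied pool: requires thread_sensitive=False, otherwise
    # SyncToAsync.__init__ raises TypeError.
    pool = ThreadPoolExecutor(max_workers=4)
    try:
        b = await sync_to_async(blocking_io, thread_sensitive=False, executor=pool)(21)
    finally:
        pool.shutdown()
    return a, b

print(asyncio.run(main()))  # (42, 42)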