@vercel/python 5.0.7 → 5.0.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +61 -0
- package/package.json +2 -2
- package/vc_init.py +107 -228
package/dist/index.js
CHANGED
@@ -3224,6 +3224,47 @@ function isFastapiEntrypoint(file) {
     return false;
   }
 }
+var FLASK_ENTRYPOINT_FILENAMES = ["app", "index", "server", "main"];
+var FLASK_ENTRYPOINT_DIRS = ["", "src", "app"];
+var FLASK_CONTENT_REGEX = /(from\s+flask\s+import\s+Flask|import\s+flask|Flask\s*\()/;
+var FLASK_CANDIDATE_ENTRYPOINTS = FLASK_ENTRYPOINT_FILENAMES.flatMap(
+  (filename) => FLASK_ENTRYPOINT_DIRS.map(
+    (dir) => import_path2.posix.join(dir, `${filename}.py`)
+  )
+);
+function isFlaskEntrypoint(file) {
+  try {
+    const fsPath = file.fsPath;
+    if (!fsPath)
+      return false;
+    const contents = import_fs2.default.readFileSync(fsPath, "utf8");
+    return FLASK_CONTENT_REGEX.test(contents);
+  } catch {
+    return false;
+  }
+}
+async function detectFlaskEntrypoint(workPath, configuredEntrypoint) {
+  const entry = configuredEntrypoint.endsWith(".py") ? configuredEntrypoint : `${configuredEntrypoint}.py`;
+  try {
+    const fsFiles = await (0, import_build_utils3.glob)("**", workPath);
+    if (fsFiles[entry])
+      return entry;
+    const candidates = FLASK_CANDIDATE_ENTRYPOINTS.filter(
+      (c) => !!fsFiles[c]
+    );
+    if (candidates.length > 0) {
+      const flaskEntrypoint = candidates.find(
+        (c) => isFlaskEntrypoint(fsFiles[c])
+      ) || candidates[0];
+      (0, import_build_utils3.debug)(`Detected Flask entrypoint: ${flaskEntrypoint}`);
+      return flaskEntrypoint;
+    }
+    return null;
+  } catch {
+    (0, import_build_utils3.debug)("Failed to discover entrypoint for Flask");
+    return null;
+  }
+}
 async function detectFastapiEntrypoint(workPath, configuredEntrypoint) {
   const entry = configuredEntrypoint.endsWith(".py") ? configuredEntrypoint : `${configuredEntrypoint}.py`;
   try {

@@ -3645,6 +3686,20 @@ var build = async ({
         message: `No FastAPI entrypoint found. Searched for: ${searchedList}`
       });
     }
+  } else if (!fsFiles[entrypoint] && config?.framework === "flask") {
+    const detected = await detectFlaskEntrypoint(workPath, entrypoint);
+    if (detected) {
+      (0, import_build_utils5.debug)(
+        `Resolved Python entrypoint to "${detected}" (configured "${entrypoint}" not found).`
+      );
+      entrypoint = detected;
+    } else {
+      const searchedList = FLASK_CANDIDATE_ENTRYPOINTS.join(", ");
+      throw new import_build_utils5.NowBuildError({
+        code: "FLASK_ENTRYPOINT_NOT_FOUND",
+        message: `No Flask entrypoint found. Searched for: ${searchedList}`
+      });
+    }
   }
   const entryDirectory = (0, import_path5.dirname)(entrypoint);
   const hasReqLocal = !!fsFiles[(0, import_path5.join)(entryDirectory, "requirements.txt")];

@@ -3915,6 +3970,12 @@ var shouldServe = (opts) => {
       return false;
     }
     return true;
+  } else if (framework === "flask") {
+    const requestPath = opts.requestPath.replace(/\/$/, "");
+    if (requestPath.startsWith("api") && opts.hasMatched) {
+      return false;
+    }
+    return true;
   }
   return defaultShouldServe(opts);
 };
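For reference, the detection added above only considers candidate files named `app.py`, `index.py`, `server.py`, or `main.py` at the project root or under `src/` or `app/`, and prefers the first candidate whose contents match `FLASK_CONTENT_REGEX` (a `flask` import or a `Flask(` call). A minimal entrypoint that this logic would pick up might look like the sketch below; the `src/app.py` path and the route are illustrative, not part of the package:

```python
# src/app.py -- hypothetical project file; any candidate path listed above works.
from flask import Flask   # matched by FLASK_CONTENT_REGEX ("from flask import Flask")

app = Flask(__name__)     # "Flask(" would also satisfy the regex on its own

@app.route("/")
def index():
    return "Hello from Flask on Vercel"
```

If neither the configured entrypoint nor any candidate file exists, the build now fails with `FLASK_ENTRYPOINT_NOT_FOUND` instead of continuing with a missing entrypoint.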
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@vercel/python",
-  "version": "5.0.7",
+  "version": "5.0.9",
   "main": "./dist/index.js",
   "license": "Apache-2.0",
   "homepage": "https://vercel.com/docs/runtimes#official-runtimes/python",

@@ -20,7 +20,7 @@
     "@types/jest": "27.4.1",
     "@types/node": "14.18.33",
     "@types/which": "3.0.0",
-    "@vercel/build-utils": "12.1.
+    "@vercel/build-utils": "12.1.2",
     "cross-env": "7.0.3",
    "execa": "^1.0.0",
     "fs-extra": "11.1.1",
package/vc_init.py
CHANGED
@@ -1,4 +1,3 @@
-from __future__ import annotations
 import sys
 import os
 import site

@@ -6,19 +5,9 @@ import importlib
 import base64
 import json
 import inspect
-import threading
-import asyncio
-import http
-import time
 from importlib import util
-from http.server import BaseHTTPRequestHandler
+from http.server import BaseHTTPRequestHandler
 import socket
-import functools
-import logging
-import builtins
-from typing import Callable, Literal
-import contextvars
-import io
 
 _here = os.path.dirname(__file__)
 _vendor_rel = '__VC_HANDLER_VENDOR_DIR'

@@ -62,69 +51,72 @@ def format_headers(headers, decode=False):
             keyToList[key].append(value)
     return keyToList
 
-
-
-
-
-
-
+if 'VERCEL_IPC_PATH' in os.environ:
+    from http.server import ThreadingHTTPServer
+    import http
+    import time
+    import contextvars
+    import functools
+    import builtins
+    import logging
 
-
-
-
-        except Exception:
-            try:
-                message = f"{record.msg}"
-            except Exception:
-                message = ""
-
-            if record.levelno >= logging.CRITICAL:
-                level = "fatal"
-            elif record.levelno >= logging.ERROR:
-                level = "error"
-            elif record.levelno >= logging.WARNING:
-                level = "warn"
-            elif record.levelno >= logging.INFO:
-                level = "info"
-            else:
-                level = "debug"
+    start_time = time.time()
+    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+    sock.connect(os.getenv("VERCEL_IPC_PATH", ""))
 
-
-
-                ctx = self._context_getter() if self._context_getter is not None else None
-            except Exception:
-                ctx = None
+    send_message = lambda message: sock.sendall((json.dumps(message) + '\0').encode())
+    storage = contextvars.ContextVar('storage', default=None)
 
-
-
-
-
-                    "payload": {
-                        "context": {
-                            "invocationId": ctx['invocationId'],
-                            "requestId": ctx['requestId'],
-                        },
-                        "message": base64.b64encode(message.encode()).decode(),
-                        "level": level,
-                    }
-                })
-            except Exception:
-                pass
-        else:
-            try:
-                sys.stdout.write(message + "\n")
-            except Exception:
-                pass
+    # Override urlopen from urllib3 (& requests) to send Request Metrics
+    try:
+        import urllib3
+        from urllib.parse import urlparse
 
+        def timed_request(func):
+            fetchId = 0
+            @functools.wraps(func)
+            def wrapper(self, method, url, *args, **kwargs):
+                nonlocal fetchId
+                fetchId += 1
+                start_time = int(time.time() * 1000)
+                result = func(self, method, url, *args, **kwargs)
+                elapsed_time = int(time.time() * 1000) - start_time
+                parsed_url = urlparse(url)
+                context = storage.get()
+                if context is not None:
+                    send_message({
+                        "type": "metric",
+                        "payload": {
+                            "context": {
+                                "invocationId": context['invocationId'],
+                                "requestId": context['requestId'],
+                            },
+                            "type": "fetch-metric",
+                            "payload": {
+                                "pathname": parsed_url.path,
+                                "search": parsed_url.query,
+                                "start": start_time,
+                                "duration": elapsed_time,
+                                "host": parsed_url.hostname or self.host,
+                                "statusCode": result.status,
+                                "method": method,
+                                "id": fetchId
+                            }
+                        }
+                    })
+                return result
+            return wrapper
+        urllib3.connectionpool.HTTPConnectionPool.urlopen = timed_request(urllib3.connectionpool.HTTPConnectionPool.urlopen)
+    except:
+        pass
 
-def setup_logging(send_message: Callable[[dict], None], storage: contextvars.ContextVar[dict | None]):
     # Override sys.stdout and sys.stderr to map logs to the correct request
     class StreamWrapper:
-        def __init__(self, stream
+        def __init__(self, stream, stream_name):
             self.stream = stream
             self.stream_name = stream_name
 
-        def write(self, message
+        def write(self, message):
             context = storage.get()
             if context is not None:
                 send_message({
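The block above re-establishes the IPC channel inline: each message is serialized with `json.dumps` and terminated with a NUL byte before being written to the Unix socket named by `VERCEL_IPC_PATH`. The consumer of that socket lives outside this package; purely as an illustration of the framing, a reader for such a stream could be sketched as follows (the `read_ipc_messages` helper is hypothetical):

```python
import json
import socket

def read_ipc_messages(conn: socket.socket):
    """Yield JSON messages from a stream of NUL-terminated frames,
    matching the json.dumps(message) + '\\0' framing used above."""
    buffer = b""
    while True:
        chunk = conn.recv(4096)
        if not chunk:
            break
        buffer += chunk
        # Emit every complete frame; keep any partial frame in the buffer.
        while b"\0" in buffer:
            frame, buffer = buffer.split(b"\0", 1)
            yield json.loads(frame.decode())
```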
@@ -147,15 +139,19 @@ def setup_logging(send_message: Callable[[dict], None], storage: contextvars.Con
     sys.stdout = StreamWrapper(sys.stdout, "stdout")
     sys.stderr = StreamWrapper(sys.stderr, "stderr")
 
-    #
-
-    def logging_wrapper(func: Callable[..., None], level: str = "info") -> Callable[..., None]:
+    # Override the global print to log to stdout
+    def print_wrapper(func):
         @functools.wraps(func)
         def wrapper(*args, **kwargs):
-
-
-
-
+            sys.stdout.write(' '.join(map(str, args)) + '\n')
+        return wrapper
+    builtins.print = print_wrapper(builtins.print)
+
+    # Override logging to maps logs to the correct request
+    def logging_wrapper(func, level="info"):
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            context = storage.get()
             if context is not None:
                 send_message({
                     "type": "log",

@@ -172,77 +168,12 @@ def setup_logging(send_message: Callable[[dict], None], storage: contextvars.Con
             func(*args, **kwargs)
         return wrapper
 
-    logging.basicConfig(level=logging.INFO
-    logging.debug = logging_wrapper(logging.debug
-    logging.info = logging_wrapper(logging.info
+    logging.basicConfig(level=logging.INFO)
+    logging.debug = logging_wrapper(logging.debug)
+    logging.info = logging_wrapper(logging.info)
     logging.warning = logging_wrapper(logging.warning, "warn")
     logging.error = logging_wrapper(logging.error, "error")
-    logging.
-    logging.critical = logging_wrapper(logging.critical, "fatal")
-
-    # Ensure built-in print funnels through stdout wrapper so prints are
-    # attributed to the current request context.
-    def print_wrapper(func: Callable[..., None]) -> Callable[..., None]:
-        @functools.wraps(func)
-        def wrapper(*args, **kwargs):
-            sys.stdout.write(' '.join(map(str, args)) + '\n')
-        return wrapper
-
-    builtins.print = print_wrapper(builtins.print)
-
-
-if 'VERCEL_IPC_PATH' in os.environ:
-    start_time = time.time()
-    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
-    sock.connect(os.getenv("VERCEL_IPC_PATH", ""))
-
-    send_message = lambda message: sock.sendall((json.dumps(message) + '\0').encode())
-    storage = contextvars.ContextVar('storage', default=None)
-
-    # Override urlopen from urllib3 (& requests) to send Request Metrics
-    try:
-        import urllib3
-        from urllib.parse import urlparse
-
-        def timed_request(func):
-            fetchId = 0
-            @functools.wraps(func)
-            def wrapper(self, method, url, *args, **kwargs):
-                nonlocal fetchId
-                fetchId += 1
-                start_time = int(time.time() * 1000)
-                result = func(self, method, url, *args, **kwargs)
-                elapsed_time = int(time.time() * 1000) - start_time
-                parsed_url = urlparse(url)
-                context = storage.get()
-                if context is not None:
-                    send_message({
-                        "type": "metric",
-                        "payload": {
-                            "context": {
-                                "invocationId": context['invocationId'],
-                                "requestId": context['requestId'],
-                            },
-                            "type": "fetch-metric",
-                            "payload": {
-                                "pathname": parsed_url.path,
-                                "search": parsed_url.query,
-                                "start": start_time,
-                                "duration": elapsed_time,
-                                "host": parsed_url.hostname or self.host,
-                                "statusCode": result.status,
-                                "method": method,
-                                "id": fetchId
-                            }
-                        }
-                    })
-                return result
-            return wrapper
-        urllib3.connectionpool.HTTPConnectionPool.urlopen = timed_request(urllib3.connectionpool.HTTPConnectionPool.urlopen)
-    except:
-        pass
-
-    setup_logging(send_message, storage)
+    logging.critical = logging_wrapper(logging.critical, "error")
 
     class BaseHandler(BaseHTTPRequestHandler):
         # Re-implementation of BaseHTTPRequestHandler's log_message method to
@@ -325,7 +256,6 @@ if 'VERCEL_IPC_PATH' in os.environ:
                 method()
                 self.wfile.flush()
     elif 'app' in __vc_variables:
-        # WSGI
         if (
             not inspect.iscoroutinefunction(__vc_module.app) and
             not inspect.iscoroutinefunction(__vc_module.app.__call__)

@@ -391,10 +321,10 @@ if 'VERCEL_IPC_PATH' in os.environ:
                     finally:
                         if hasattr(response, 'close'):
                             response.close()
-        # ASGI
         else:
             from urllib.parse import urlparse
             from io import BytesIO
+            import asyncio
 
             app = __vc_module.app
 

@@ -409,7 +339,6 @@ if 'VERCEL_IPC_PATH' in os.environ:
                         headers_encoded.append([k.lower().encode(), [i.encode() for i in v]])
                     else:
                         headers_encoded.append([k.lower().encode(), v.encode()])
-
                 scope = {
                     'server': (self.headers.get('host', 'lambda'), self.headers.get('x-forwarded-port', 80)),
                     'client': (self.headers.get(

@@ -432,91 +361,41 @@ if 'VERCEL_IPC_PATH' in os.environ:
                 else:
                     body = b''
 
-
-
-
-
-
+                if _use_legacy_asyncio:
+                    loop = asyncio.new_event_loop()
+                    app_queue = asyncio.Queue(loop=loop)
+                else:
+                    app_queue = asyncio.Queue()
+                app_queue.put_nowait({'type': 'http.request', 'body': body, 'more_body': False})
+
+                # Prepare ASGI receive function
+                async def receive():
+                    message = await app_queue.get()
+                    return message
+
+                # Prepare ASGI send function
+                response_started = False
+                async def send(event):
+                    nonlocal response_started
+                    if event['type'] == 'http.response.start':
+                        self.send_response(event['status'])
+                        if 'headers' in event:
+                            for name, value in event['headers']:
+                                self.send_header(name.decode(), value.decode())
+                        self.end_headers()
+                        response_started = True
+                    elif event['type'] == 'http.response.body':
+                        self.wfile.write(event['body'])
+                        if not event.get('more_body', False):
+                            self.wfile.flush()
 
-
-
-
-
-
-
-
-                    response_started = False
-                    try:
-                        async def runner():
-                            # Per-request app queue
-                            if _use_legacy_asyncio:
-                                loop = asyncio.get_running_loop()
-                                app_queue = asyncio.Queue(loop=loop)
-                            else:
-                                app_queue = asyncio.Queue()
-
-                            await app_queue.put({'type': 'http.request', 'body': body, 'more_body': False})
-
-                            async def receive():
-                                message = await app_queue.get()
-                                return message
-
-                            async def send(event):
-                                nonlocal response_started
-                                if event['type'] == 'http.response.start':
-                                    self.send_response(event['status'])
-                                    if 'headers' in event:
-                                        for name, value in event['headers']:
-                                            self.send_header(name.decode(), value.decode())
-                                    self.end_headers()
-                                    response_started = True
-                                elif event['type'] == 'http.response.body':
-                                    # Stream body as it is produced; flush on completion
-                                    body_bytes = event.get('body', b'') or b''
-                                    if body_bytes:
-                                        self.wfile.write(body_bytes)
-                                    if not event.get('more_body', False):
-                                        try:
-                                            self.wfile.flush()
-                                        finally:
-                                            response_done.set()
-                                            try:
-                                                app_queue.put_nowait({'type': 'http.disconnect'})
-                                            except Exception:
-                                                pass
-
-                            # Run ASGI app (includes background tasks)
-                            asgi_instance = app(scope, receive, send)
-                            await asgi_instance
-
-                        asyncio.run(runner())
-                    except Exception:
-                        # If the app raised before starting the response, synthesize a 500
-                        try:
-                            if not response_started:
-                                self.send_response(500)
-                                self.end_headers()
-                                try:
-                                    self.wfile.flush()
-                                except Exception:
-                                    pass
-                        except Exception:
-                            pass
-                    finally:
-                        # Always unblock the waiting thread to avoid hangs
-                        try:
-                            response_done.set()
-                        except Exception:
-                            pass
-                        if token is not None:
-                            storage.reset(token)
-
-                    # Run ASGI in background thread to allow returning after final flush
-                    t = threading.Thread(target=run_asgi, daemon=True)
-                    t.start()
-
-                    # Wait until final body chunk has been flushed to client
-                    response_done.wait()
+                # Run the ASGI application
+                asgi_instance = app(scope, receive, send)
+                if _use_legacy_asyncio:
+                    asgi_task = loop.create_task(asgi_instance)
+                    loop.run_until_complete(asgi_task)
+                else:
+                    asyncio.run(asgi_instance)
 
     if 'Handler' in locals():
         server = ThreadingHTTPServer(('127.0.0.1', 0), Handler)
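The reverted ASGI path above drives `app` directly on the handler thread: it queues a single `http.request` event, exposes it through `receive`, and translates `http.response.start` / `http.response.body` events from `send` into the HTTP response, flushing once `more_body` is false. A minimal ASGI application that this plumbing can serve, shown only as an illustrative sketch and not part of the package, looks like:

```python
# Illustrative ASGI app compatible with the receive/send pair built above.
async def app(scope, receive, send):
    assert scope["type"] == "http"

    # The handler queues exactly one http.request event carrying the full body.
    event = await receive()
    body = event.get("body", b"")

    await send({
        "type": "http.response.start",
        "status": 200,
        "headers": [(b"content-type", b"text/plain")],
    })
    await send({
        "type": "http.response.body",
        "body": b"received %d bytes" % len(body),
        "more_body": False,
    })
```

On interpreters where `_use_legacy_asyncio` is true, the same coroutine is run on a dedicated event loop via `loop.run_until_complete`; otherwise `asyncio.run` is used.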