@vercel/python 5.0.8 → 5.0.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1631,7 +1631,7 @@ var require_cross_spawn = __commonJS({
  var cp = require("child_process");
  var parse2 = require_parse();
  var enoent = require_enoent();
- function spawn3(command, args, options) {
+ function spawn2(command, args, options) {
  const parsed = parse2(command, args, options);
  const spawned = cp.spawn(parsed.command, parsed.args, parsed.options);
  enoent.hookChildProcess(spawned, parsed);
@@ -1643,8 +1643,8 @@ var require_cross_spawn = __commonJS({
  result.error = result.error || enoent.verifyENOENTSync(result.status, parsed);
  return result;
  }
- module2.exports = spawn3;
- module2.exports.spawn = spawn3;
+ module2.exports = spawn2;
+ module2.exports.spawn = spawn2;
  module2.exports.sync = spawnSync;
  module2.exports._parse = parse2;
  module2.exports._enoent = enoent;
@@ -3196,7 +3196,7 @@ function isInstalled2({ pipPath, pythonPath }) {
  }

  // src/start-dev-server.ts
- var import_child_process2 = require("child_process");
+ var import_child_process = require("child_process");
  var import_fs4 = require("fs");
  var import_path4 = require("path");
  var import_build_utils4 = require("@vercel/build-utils");
@@ -3289,30 +3289,8 @@ async function detectFastapiEntrypoint(workPath, configuredEntrypoint) {
  }

  // src/utils.ts
- var import_child_process = require("child_process");
  var import_fs3 = __toESM(require("fs"));
  var import_path3 = require("path");
- var tryImport = (pythonPath, mod) => new Promise((res) => {
- const check = (0, import_child_process.spawn)(pythonPath, ["-c", `import ${mod}`], {
- stdio: ["ignore", "pipe", "pipe"]
- });
- check.once("error", () => res(false));
- check.once("exit", (code) => res(code === 0));
- });
- var detectAsgiServer = async (workPath, pythonPath) => new Promise((resolve, reject) => {
- tryImport(pythonPath, "uvicorn").then((hasUvicorn) => {
- if (hasUvicorn)
- return resolve("uvicorn");
- tryImport(pythonPath, "hypercorn").then((hasHypercorn) => {
- if (hasHypercorn)
- return resolve("hypercorn");
- const { venvRoot } = useVirtualEnv(workPath, {}, pythonPath);
- const baseErrorMessage = 'No ASGI server found. Please install either "uvicorn" or "hypercorn" (e.g. "pip install uvicorn").';
- const errorMessage = !venvRoot ? `${baseErrorMessage} If you are using a virtual environment, please activate it and try again.` : baseErrorMessage;
- reject(new Error(errorMessage));
- });
- });
- });
  var isInVirtualEnv = () => {
  return process.env.VIRTUAL_ENV;
  };
@@ -3363,6 +3341,7 @@ var ANSI_PATTERN = "[\\u001B\\u009B][[\\]()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[
  var ANSI_ESCAPE_RE = new RegExp(ANSI_PATTERN, "g");
  var stripAnsi = (s) => s.replace(ANSI_ESCAPE_RE, "");
  var ASGI_SHIM_MODULE = "vc_init_dev_asgi";
+ var WSGI_SHIM_MODULE = "vc_init_dev_wsgi";
  var PERSISTENT_SERVERS = /* @__PURE__ */ new Map();
  var PENDING_STARTS = /* @__PURE__ */ new Map();
  var restoreWarnings = null;
@@ -3406,7 +3385,7 @@ function installGlobalCleanupHandlers() {
  killAll();
  });
  }
- function createDevStaticShim(workPath, modulePath) {
+ function createDevAsgiShim(workPath, modulePath) {
  try {
  const vercelPythonDir = (0, import_path4.join)(workPath, ".vercel", "python");
  (0, import_fs4.mkdirSync)(vercelPythonDir, { recursive: true });
@@ -3422,24 +3401,55 @@ function createDevStaticShim(workPath, modulePath) {
  return null;
  }
  }
+ function createDevWsgiShim(workPath, modulePath) {
+ try {
+ const vercelPythonDir = (0, import_path4.join)(workPath, ".vercel", "python");
+ (0, import_fs4.mkdirSync)(vercelPythonDir, { recursive: true });
+ const shimPath = (0, import_path4.join)(vercelPythonDir, `${WSGI_SHIM_MODULE}.py`);
+ const templatePath = (0, import_path4.join)(__dirname, "..", `${WSGI_SHIM_MODULE}.py`);
+ const template = (0, import_fs4.readFileSync)(templatePath, "utf8");
+ const shimSource = template.replace(/__VC_DEV_MODULE_PATH__/g, modulePath);
+ (0, import_fs4.writeFileSync)(shimPath, shimSource, "utf8");
+ (0, import_build_utils4.debug)(`Prepared Python dev WSGI shim at ${shimPath}`);
+ return WSGI_SHIM_MODULE;
+ } catch (err) {
+ (0, import_build_utils4.debug)(`Failed to prepare dev WSGI shim: ${err?.message || err}`);
+ return null;
+ }
+ }
  var startDevServer = async (opts) => {
  const { entrypoint: rawEntrypoint, workPath, meta = {}, config } = opts;
- if (config?.framework !== "fastapi") {
+ const framework = config?.framework;
+ if (framework !== "fastapi" && framework !== "flask") {
  return null;
  }
  if (!restoreWarnings)
  restoreWarnings = silenceNodeWarnings();
  installGlobalCleanupHandlers();
- const detected = await detectFastapiEntrypoint(workPath, rawEntrypoint);
- if (!detected) {
- throw new Error(
- `No FastAPI entrypoint found. Searched for: ${FASTAPI_CANDIDATE_ENTRYPOINTS.join(", ")}`
+ let entry = null;
+ if (framework === "fastapi") {
+ const detectedFastapi = await detectFastapiEntrypoint(
+ workPath,
+ rawEntrypoint
  );
+ if (!detectedFastapi) {
+ throw new Error(
+ `No FastAPI entrypoint found. Searched for: ${FASTAPI_CANDIDATE_ENTRYPOINTS.join(", ")}`
+ );
+ }
+ entry = detectedFastapi;
+ } else {
+ const detectedFlask = await detectFlaskEntrypoint(workPath, rawEntrypoint);
+ if (!detectedFlask) {
+ throw new Error(
+ `No Flask entrypoint found. Searched for: ${FLASK_CANDIDATE_ENTRYPOINTS.join(", ")}`
+ );
+ }
+ entry = detectedFlask;
  }
- const entry = detected;
  const modulePath = entry.replace(/\.py$/i, "").replace(/[\\/]/g, ".");
  const env = { ...process.env, ...meta.env || {} };
- const serverKey = `${workPath}::${entry}`;
+ const serverKey = `${workPath}::${entry}::${framework}`;
  const existing = PERSISTENT_SERVERS.get(serverKey);
  if (existing) {
  return {
@@ -3492,40 +3502,30 @@ var startDevServer = async (opts) => {
  (0, import_build_utils4.debug)(`Using virtualenv at ${venvRoot}`);
  } else {
  (0, import_build_utils4.debug)("No virtualenv found");
+ try {
+ const yellow = "\x1B[33m";
+ const reset = "\x1B[0m";
+ const venvCmd = process.platform === "win32" ? "python -m venv .venv && .venv\\Scripts\\activate" : "python -m venv .venv && source .venv/bin/activate";
+ process.stderr.write(
+ `${yellow}Warning: no virtual environment detected in ${workPath}. Using system Python: ${pythonCmd}.${reset}
+ If you are using a virtual environment, activate it before running "vercel dev", or create one: ${venvCmd}
+ `
+ );
+ } catch (_) {
+ }
  }
  }
- const devShimModule = createDevStaticShim(workPath, modulePath);
- if (devShimModule) {
- const vercelPythonDir = (0, import_path4.join)(workPath, ".vercel", "python");
- const existingPythonPath = env.PYTHONPATH || "";
- env.PYTHONPATH = existingPythonPath ? `${vercelPythonDir}:${existingPythonPath}` : vercelPythonDir;
- }
- detectAsgiServer(workPath, pythonCmd).then(async (serverKind) => {
- if (resolved)
- return;
- const host = "127.0.0.1";
- const argv = serverKind === "uvicorn" ? [
- "-u",
- "-m",
- "uvicorn",
- `${devShimModule || modulePath}:app`,
- "--host",
- host,
- "--port",
- "0",
- "--use-colors"
- ] : [
- "-u",
- "-m",
- "hypercorn",
- `${devShimModule || modulePath}:app`,
- "-b",
- `${host}:0`
- ];
- (0, import_build_utils4.debug)(
- `Starting dev server (${serverKind}): ${pythonCmd} ${argv.join(" ")}`
- );
- const child = (0, import_child_process2.spawn)(pythonCmd, argv, {
+ if (framework === "fastapi") {
+ const devShimModule = createDevAsgiShim(workPath, modulePath);
+ if (devShimModule) {
+ const vercelPythonDir = (0, import_path4.join)(workPath, ".vercel", "python");
+ const existingPythonPath = env.PYTHONPATH || "";
+ env.PYTHONPATH = existingPythonPath ? `${vercelPythonDir}:${existingPythonPath}` : vercelPythonDir;
+ }
+ const moduleToRun = devShimModule || modulePath;
+ const argv = ["-u", "-m", moduleToRun];
+ (0, import_build_utils4.debug)(`Starting ASGI dev server: ${pythonCmd} ${argv.join(" ")}`);
+ const child = (0, import_child_process.spawn)(pythonCmd, argv, {
  cwd: workPath,
  env,
  stdio: ["inherit", "pipe", "pipe"]
@@ -3587,16 +3587,90 @@ var startDevServer = async (opts) => {
  child.once("exit", (code, signal) => {
  if (!resolved) {
  const err = new Error(
- `${serverKind} server exited before binding (code=${code}, signal=${signal})`
+ `ASGI dev server exited before binding (code=${code}, signal=${signal})`
  );
  rejectChildReady(err);
  reject(err);
  }
  });
- }).catch((err) => {
- rejectChildReady(err);
- reject(err);
- });
+ } else {
+ const devShimModule = createDevWsgiShim(workPath, modulePath);
+ if (devShimModule) {
+ const vercelPythonDir = (0, import_path4.join)(workPath, ".vercel", "python");
+ const existingPythonPath = env.PYTHONPATH || "";
+ env.PYTHONPATH = existingPythonPath ? `${vercelPythonDir}:${existingPythonPath}` : vercelPythonDir;
+ }
+ const moduleToRun = devShimModule || modulePath;
+ const argv = ["-u", "-m", moduleToRun];
+ (0, import_build_utils4.debug)(`Starting Flask dev server: ${pythonCmd} ${argv.join(" ")}`);
+ const child = (0, import_child_process.spawn)(pythonCmd, argv, {
+ cwd: workPath,
+ env,
+ stdio: ["inherit", "pipe", "pipe"]
+ });
+ childProcess = child;
+ stdoutLogListener = (buf) => {
+ const s = buf.toString();
+ for (const line of s.split(/\r?\n/)) {
+ if (line) {
+ process.stdout.write(line.endsWith("\n") ? line : line + "\n");
+ }
+ }
+ };
+ stderrLogListener = (buf) => {
+ const s = buf.toString();
+ for (const line of s.split(/\r?\n/)) {
+ if (line) {
+ process.stderr.write(line.endsWith("\n") ? line : line + "\n");
+ }
+ }
+ };
+ child.stdout?.on("data", stdoutLogListener);
+ child.stderr?.on("data", stderrLogListener);
+ const readinessRegexes = [
+ /Werkzeug running on https?:\/\/(?:\[[^\]]+\]|[^:]+):(\d+)/i,
+ /(?:Running|Serving) on https?:\/\/(?:\[[^\]]+\]|[^:\s]+):(\d+)/i
+ ];
+ const onDetect = (chunk) => {
+ const text = chunk.toString();
+ const clean = stripAnsi(text);
+ let portMatch = null;
+ for (const rx of readinessRegexes) {
+ const m = clean.match(rx);
+ if (m) {
+ portMatch = m;
+ break;
+ }
+ }
+ if (portMatch && child.pid) {
+ if (!resolved) {
+ resolved = true;
+ child.stdout?.removeListener("data", onDetect);
+ child.stderr?.removeListener("data", onDetect);
+ const port2 = Number(portMatch[1]);
+ resolveChildReady({ port: port2, pid: child.pid });
+ resolve();
+ }
+ }
+ };
+ child.stdout?.on("data", onDetect);
+ child.stderr?.on("data", onDetect);
+ child.once("error", (err) => {
+ if (!resolved) {
+ rejectChildReady(err);
+ reject(err);
+ }
+ });
+ child.once("exit", (code, signal) => {
+ if (!resolved) {
+ const err = new Error(
+ `Flask dev server exited before binding (code=${code}, signal=${signal})`
+ );
+ rejectChildReady(err);
+ reject(err);
+ }
+ });
+ }
  });
  const { port, pid } = await childReady;
  PERSISTENT_SERVERS.set(serverKey, {
package/package.json CHANGED
@@ -1,13 +1,14 @@
  {
  "name": "@vercel/python",
- "version": "5.0.8",
+ "version": "5.0.10",
  "main": "./dist/index.js",
  "license": "Apache-2.0",
  "homepage": "https://vercel.com/docs/runtimes#official-runtimes/python",
  "files": [
  "dist",
  "vc_init.py",
- "vc_init_dev_asgi.py"
+ "vc_init_dev_asgi.py",
+ "vc_init_dev_wsgi.py"
  ],
  "repository": {
  "type": "git",
@@ -20,7 +21,7 @@
  "@types/jest": "27.4.1",
  "@types/node": "14.18.33",
  "@types/which": "3.0.0",
- "@vercel/build-utils": "12.1.2",
+ "@vercel/build-utils": "12.1.3",
  "cross-env": "7.0.3",
  "execa": "^1.0.0",
  "fs-extra": "11.1.1",
package/vc_init.py CHANGED
@@ -1,4 +1,3 @@
- from __future__ import annotations
  import sys
  import os
  import site
@@ -6,19 +5,9 @@ import importlib
  import base64
  import json
  import inspect
- import threading
- import asyncio
- import http
- import time
  from importlib import util
- from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer
+ from http.server import BaseHTTPRequestHandler
  import socket
- import functools
- import logging
- import builtins
- from typing import Callable, Literal
- import contextvars
- import io

  _here = os.path.dirname(__file__)
  _vendor_rel = '__VC_HANDLER_VENDOR_DIR'
@@ -62,69 +51,72 @@ def format_headers(headers, decode=False):
  keyToList[key].append(value)
  return keyToList

- # Custom logging handler so logs are properly categorized
- class VCLogHandler(logging.Handler):
- def __init__(self, send_message: Callable[[dict], None], context_getter: Callable[[], dict] | None = None):
- super().__init__()
- self._send_message = send_message
- self._context_getter = context_getter
+ if 'VERCEL_IPC_PATH' in os.environ:
+ from http.server import ThreadingHTTPServer
+ import http
+ import time
+ import contextvars
+ import functools
+ import builtins
+ import logging

- def emit(self, record):
- try:
- message = record.getMessage()
- except Exception:
- try:
- message = f"{record.msg}"
- except Exception:
- message = ""
-
- if record.levelno >= logging.CRITICAL:
- level = "fatal"
- elif record.levelno >= logging.ERROR:
- level = "error"
- elif record.levelno >= logging.WARNING:
- level = "warn"
- elif record.levelno >= logging.INFO:
- level = "info"
- else:
- level = "debug"
+ start_time = time.time()
+ sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+ sock.connect(os.getenv("VERCEL_IPC_PATH", ""))

- ctx = None
- try:
- ctx = self._context_getter() if self._context_getter is not None else None
- except Exception:
- ctx = None
+ send_message = lambda message: sock.sendall((json.dumps(message) + '\0').encode())
+ storage = contextvars.ContextVar('storage', default=None)

- if ctx is not None:
- try:
- self._send_message({
- "type": "log",
- "payload": {
- "context": {
- "invocationId": ctx['invocationId'],
- "requestId": ctx['requestId'],
- },
- "message": base64.b64encode(message.encode()).decode(),
- "level": level,
- }
- })
- except Exception:
- pass
- else:
- try:
- sys.stdout.write(message + "\n")
- except Exception:
- pass
+ # Override urlopen from urllib3 (& requests) to send Request Metrics
+ try:
+ import urllib3
+ from urllib.parse import urlparse

+ def timed_request(func):
+ fetchId = 0
+ @functools.wraps(func)
+ def wrapper(self, method, url, *args, **kwargs):
+ nonlocal fetchId
+ fetchId += 1
+ start_time = int(time.time() * 1000)
+ result = func(self, method, url, *args, **kwargs)
+ elapsed_time = int(time.time() * 1000) - start_time
+ parsed_url = urlparse(url)
+ context = storage.get()
+ if context is not None:
+ send_message({
+ "type": "metric",
+ "payload": {
+ "context": {
+ "invocationId": context['invocationId'],
+ "requestId": context['requestId'],
+ },
+ "type": "fetch-metric",
+ "payload": {
+ "pathname": parsed_url.path,
+ "search": parsed_url.query,
+ "start": start_time,
+ "duration": elapsed_time,
+ "host": parsed_url.hostname or self.host,
+ "statusCode": result.status,
+ "method": method,
+ "id": fetchId
+ }
+ }
+ })
+ return result
+ return wrapper
+ urllib3.connectionpool.HTTPConnectionPool.urlopen = timed_request(urllib3.connectionpool.HTTPConnectionPool.urlopen)
+ except:
+ pass

- def setup_logging(send_message: Callable[[dict], None], storage: contextvars.ContextVar[dict | None]):
  # Override sys.stdout and sys.stderr to map logs to the correct request
  class StreamWrapper:
- def __init__(self, stream: io.TextIOBase, stream_name: Literal["stdout", "stderr"]):
+ def __init__(self, stream, stream_name):
  self.stream = stream
  self.stream_name = stream_name

- def write(self, message: str):
+ def write(self, message):
  context = storage.get()
  if context is not None:
  send_message({
@@ -147,15 +139,19 @@ def setup_logging(send_message: Callable[[dict], None], storage: contextvars.Con
  sys.stdout = StreamWrapper(sys.stdout, "stdout")
  sys.stderr = StreamWrapper(sys.stderr, "stderr")

- # Wrap top-level logging helpers to emit structured logs when a request
- # context is available; otherwise fall back to the original behavior.
- def logging_wrapper(func: Callable[..., None], level: str = "info") -> Callable[..., None]:
+ # Override the global print to log to stdout
+ def print_wrapper(func):
  @functools.wraps(func)
  def wrapper(*args, **kwargs):
- try:
- context = storage.get()
- except Exception:
- context = None
+ sys.stdout.write(' '.join(map(str, args)) + '\n')
+ return wrapper
+ builtins.print = print_wrapper(builtins.print)
+
+ # Override logging to maps logs to the correct request
+ def logging_wrapper(func, level="info"):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ context = storage.get()
  if context is not None:
  send_message({
  "type": "log",
@@ -172,77 +168,12 @@ def setup_logging(send_message: Callable[[dict], None], storage: contextvars.Con
  func(*args, **kwargs)
  return wrapper

- logging.basicConfig(level=logging.INFO, handlers=[VCLogHandler(send_message, storage.get)], force=True)
- logging.debug = logging_wrapper(logging.debug, "debug")
- logging.info = logging_wrapper(logging.info, "info")
+ logging.basicConfig(level=logging.INFO)
+ logging.debug = logging_wrapper(logging.debug)
+ logging.info = logging_wrapper(logging.info)
  logging.warning = logging_wrapper(logging.warning, "warn")
  logging.error = logging_wrapper(logging.error, "error")
- logging.fatal = logging_wrapper(logging.fatal, "fatal")
- logging.critical = logging_wrapper(logging.critical, "fatal")
-
- # Ensure built-in print funnels through stdout wrapper so prints are
- # attributed to the current request context.
- def print_wrapper(func: Callable[..., None]) -> Callable[..., None]:
- @functools.wraps(func)
- def wrapper(*args, **kwargs):
- sys.stdout.write(' '.join(map(str, args)) + '\n')
- return wrapper
-
- builtins.print = print_wrapper(builtins.print)
-
-
- if 'VERCEL_IPC_PATH' in os.environ:
- start_time = time.time()
- sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
- sock.connect(os.getenv("VERCEL_IPC_PATH", ""))
-
- send_message = lambda message: sock.sendall((json.dumps(message) + '\0').encode())
- storage = contextvars.ContextVar('storage', default=None)
-
- # Override urlopen from urllib3 (& requests) to send Request Metrics
- try:
- import urllib3
- from urllib.parse import urlparse
-
- def timed_request(func):
- fetchId = 0
- @functools.wraps(func)
- def wrapper(self, method, url, *args, **kwargs):
- nonlocal fetchId
- fetchId += 1
- start_time = int(time.time() * 1000)
- result = func(self, method, url, *args, **kwargs)
- elapsed_time = int(time.time() * 1000) - start_time
- parsed_url = urlparse(url)
- context = storage.get()
- if context is not None:
- send_message({
- "type": "metric",
- "payload": {
- "context": {
- "invocationId": context['invocationId'],
- "requestId": context['requestId'],
- },
- "type": "fetch-metric",
- "payload": {
- "pathname": parsed_url.path,
- "search": parsed_url.query,
- "start": start_time,
- "duration": elapsed_time,
- "host": parsed_url.hostname or self.host,
- "statusCode": result.status,
- "method": method,
- "id": fetchId
- }
- }
- })
- return result
- return wrapper
- urllib3.connectionpool.HTTPConnectionPool.urlopen = timed_request(urllib3.connectionpool.HTTPConnectionPool.urlopen)
- except:
- pass
-
- setup_logging(send_message, storage)
+ logging.critical = logging_wrapper(logging.critical, "error")

  class BaseHandler(BaseHTTPRequestHandler):
  # Re-implementation of BaseHTTPRequestHandler's log_message method to
@@ -325,7 +256,6 @@ if 'VERCEL_IPC_PATH' in os.environ:
  method()
  self.wfile.flush()
  elif 'app' in __vc_variables:
- # WSGI
  if (
  not inspect.iscoroutinefunction(__vc_module.app) and
  not inspect.iscoroutinefunction(__vc_module.app.__call__)
@@ -391,10 +321,10 @@ if 'VERCEL_IPC_PATH' in os.environ:
  finally:
  if hasattr(response, 'close'):
  response.close()
- # ASGI
  else:
  from urllib.parse import urlparse
  from io import BytesIO
+ import asyncio

  app = __vc_module.app

@@ -409,7 +339,6 @@ if 'VERCEL_IPC_PATH' in os.environ:
  headers_encoded.append([k.lower().encode(), [i.encode() for i in v]])
  else:
  headers_encoded.append([k.lower().encode(), v.encode()])
-
  scope = {
  'server': (self.headers.get('host', 'lambda'), self.headers.get('x-forwarded-port', 80)),
  'client': (self.headers.get(
@@ -432,96 +361,41 @@ if 'VERCEL_IPC_PATH' in os.environ:
  else:
  body = b''

- # Event to signal that the response has been fully sent
- response_done = threading.Event()
- # Event to signal the ASGI app has fully completed (incl. background tasks)
- app_done = threading.Event()
-
- # Propagate request context to background thread for logging & metrics
- request_context = storage.get()
+ if _use_legacy_asyncio:
+ loop = asyncio.new_event_loop()
+ app_queue = asyncio.Queue(loop=loop)
+ else:
+ app_queue = asyncio.Queue()
+ app_queue.put_nowait({'type': 'http.request', 'body': body, 'more_body': False})
+
+ # Prepare ASGI receive function
+ async def receive():
+ message = await app_queue.get()
+ return message
+
+ # Prepare ASGI send function
+ response_started = False
+ async def send(event):
+ nonlocal response_started
+ if event['type'] == 'http.response.start':
+ self.send_response(event['status'])
+ if 'headers' in event:
+ for name, value in event['headers']:
+ self.send_header(name.decode(), value.decode())
+ self.end_headers()
+ response_started = True
+ elif event['type'] == 'http.response.body':
+ self.wfile.write(event['body'])
+ if not event.get('more_body', False):
+ self.wfile.flush()

- def run_asgi():
- # Ensure request context is available in this thread
- if request_context is not None:
- token = storage.set(request_context)
- else:
- token = None
- # Track if headers were sent, so we can synthesize a 500 on early failure
- response_started = False
- try:
- async def runner():
- # Per-request app queue
- if _use_legacy_asyncio:
- loop = asyncio.get_running_loop()
- app_queue = asyncio.Queue(loop=loop)
- else:
- app_queue = asyncio.Queue()
-
- await app_queue.put({'type': 'http.request', 'body': body, 'more_body': False})
-
- async def receive():
- message = await app_queue.get()
- return message
-
- async def send(event):
- nonlocal response_started
- if event['type'] == 'http.response.start':
- self.send_response(event['status'])
- if 'headers' in event:
- for name, value in event['headers']:
- self.send_header(name.decode(), value.decode())
- self.end_headers()
- response_started = True
- elif event['type'] == 'http.response.body':
- # Stream body as it is produced; flush on completion
- body_bytes = event.get('body', b'') or b''
- if body_bytes:
- self.wfile.write(body_bytes)
- if not event.get('more_body', False):
- try:
- self.wfile.flush()
- finally:
- response_done.set()
- try:
- app_queue.put_nowait({'type': 'http.disconnect'})
- except Exception:
- pass
-
- # Run ASGI app (includes background tasks)
- asgi_instance = app(scope, receive, send)
- await asgi_instance
- # Mark app completion when the ASGI callable returns
- app_done.set()
-
- asyncio.run(runner())
- except Exception:
- # If the app raised before starting the response, synthesize a 500
- try:
- if not response_started:
- self.send_response(500)
- self.end_headers()
- try:
- self.wfile.flush()
- except Exception:
- pass
- except Exception:
- pass
- finally:
- # Always unblock the waiting thread to avoid hangs
- response_done.set()
- # Ensure app completion is always signaled
- app_done.set()
- if token is not None:
- storage.reset(token)
-
- # Run ASGI in background thread to allow returning after final flush
- t = threading.Thread(target=run_asgi, daemon=True)
- t.start()
-
- # Wait until final body chunk has been flushed to client
- response_done.wait()
- # Also wait until the ASGI app finishes (includes background tasks)
- app_done.wait()
+ # Run the ASGI application
+ asgi_instance = app(scope, receive, send)
+ if _use_legacy_asyncio:
+ asgi_task = loop.create_task(asgi_instance)
+ loop.run_until_complete(asgi_task)
+ else:
+ asyncio.run(asgi_instance)

  if 'Handler' in locals():
  server = ThreadingHTTPServer(('127.0.0.1', 0), Handler)
package/vc_init_dev_asgi.py CHANGED
@@ -1,8 +1,23 @@
  # Auto-generated template used by vercel dev (Python, ASGI)
  # Serves static files from PUBLIC_DIR before delegating to the user ASGI app.
- from importlib import import_module
+ import sys
  import os
  from os import path as _p
+ from importlib import import_module
+
+
+ # Simple ANSI coloring. Respect NO_COLOR environment variable.
+ _NO_COLOR = 'NO_COLOR' in os.environ
+ _RESET = "\x1b[0m"
+ _YELLOW = "\x1b[33m"
+ _GREEN = "\x1b[32m"
+ _RED = "\x1b[31m"
+
+ def _color(text: str, code: str) -> str:
+ if _NO_COLOR:
+ return text
+ return f"{code}{text}{_RESET}"
+

  # Optional StaticFiles import; tolerate missing deps
  StaticFiles = None
@@ -56,3 +71,29 @@ async def app(scope, receive, send):
  except Exception:
  pass
  await USER_ASGI_APP(scope, receive, send)
+
+
+ if __name__ == '__main__':
+ # Development runner for ASGI: prefer uvicorn, then hypercorn.
+ # Bind to localhost on an ephemeral port and emit a recognizable log line
+ # so the caller can detect the bound port.
+ host = '127.0.0.1'
+ try:
+ import uvicorn
+ uvicorn.run('vc_init_dev_asgi:app', host=host, port=0, use_colors=True)
+ except Exception:
+ try:
+ import asyncio
+ from hypercorn.config import Config
+ from hypercorn.asyncio import serve
+
+ config = Config()
+ config.bind = [f'{host}:0']
+
+ async def _run():
+ await serve(app, config)
+
+ asyncio.run(_run())
+ except Exception:
+ print(_color('No ASGI server found. Please install either "uvicorn" or "hypercorn" (e.g. "pip install uvicorn").', _RED), file=sys.stderr)
+ sys.exit(1)
package/vc_init_dev_wsgi.py ADDED
@@ -0,0 +1,123 @@
+ """
+ Auto-generated template used by vercel dev (Python, WSGI)
+ Serves static files from PUBLIC_DIR before delegating to the user WSGI app.
+
+ This file is written to the project at .vercel/python/vc_init_dev_wsgi.py
+ and imported by the dev server launcher.
+ """
+ from importlib import import_module
+ from os import path as _p
+ import os
+ import mimetypes
+
+ # Simple ANSI coloring. Respect NO_COLOR environment variable.
+ _NO_COLOR = 'NO_COLOR' in os.environ
+ _RESET = "\x1b[0m"
+ _YELLOW = "\x1b[33m"
+ _GREEN = "\x1b[32m"
+
+ def _color(text: str, code: str) -> str:
+ if _NO_COLOR:
+ return text
+ return f"{code}{text}{_RESET}"
+
+ USER_MODULE = "__VC_DEV_MODULE_PATH__"
+ PUBLIC_DIR = "public"
+
+ _mod = import_module(USER_MODULE)
+ _app = getattr(_mod, "app", None)
+ if _app is None:
+ raise RuntimeError(
+ f"Missing 'app' in module '{USER_MODULE}'. Define `app = ...` (WSGI app)."
+ )
+
+
+ def _is_safe_file(base_dir: str, target: str) -> bool:
+ try:
+ base = _p.realpath(base_dir)
+ tgt = _p.realpath(target)
+ return (tgt == base or tgt.startswith(base + os.sep)) and _p.isfile(tgt)
+ except Exception:
+ return False
+
+
+ def _static_wsgi_app(environ, start_response):
+ # Only handle GET/HEAD requests for static assets
+ if environ.get("REQUEST_METHOD", "GET") not in ("GET", "HEAD"):
+ return _not_found(start_response)
+
+ req_path = environ.get("PATH_INFO", "/") or "/"
+ safe = _p.normpath(req_path).lstrip("/")
+ full = _p.join(PUBLIC_DIR, safe)
+ if not _is_safe_file(PUBLIC_DIR, full):
+ return _not_found(start_response)
+
+ ctype, encoding = mimetypes.guess_type(full)
+ headers = [("Content-Type", ctype or "application/octet-stream")]
+ try:
+ # For HEAD requests, send headers only
+ if environ.get("REQUEST_METHOD") == "HEAD":
+ start_response("200 OK", headers)
+ return []
+ with open(full, "rb") as f:
+ data = f.read()
+ headers.append(("Content-Length", str(len(data))))
+ start_response("200 OK", headers)
+ return [data]
+ except Exception:
+ return _not_found(start_response)
+
+
+ def _not_found(start_response):
+ start_response("404 Not Found", [("Content-Type", "text/plain; charset=utf-8")])
+ return [b"Not Found"]
+
+
+ def _combined_app(environ, start_response):
+ # Try static first; if 404 then delegate to user app
+ captured_status = ""
+ captured_headers = tuple()
+ body_chunks = []
+
+ def capture_start_response(status, headers, exc_info=None):  # type: ignore[no-redef]
+ nonlocal captured_status, captured_headers
+ captured_status = status
+ captured_headers = tuple(headers)
+ # Return a writer that buffers the body
+ def write(chunk: bytes):
+ body_chunks.append(chunk)
+ return write
+
+ result = _static_wsgi_app(environ, capture_start_response)
+ # If static handler produced 200, forward its response
+ if captured_status.startswith("200 "):
+ # Send headers and any chunks collected
+ writer = start_response(captured_status, list(captured_headers))
+ for chunk in body_chunks:
+ writer(chunk)
+ return result
+
+ # Otherwise, delegate to user's WSGI app
+ return _app(environ, start_response)
+
+
+ # Public WSGI application consumed by the dev runner
+ app = _combined_app
+
+
+ if __name__ == "__main__":
+ # Development runner: prefer Werkzeug, fall back to stdlib wsgiref.
+ # Bind to localhost on an ephemeral port and emit a recognizable log line
+ # so the caller can detect the bound port.
+ host = "127.0.0.1"
+ try:
+ from werkzeug.serving import run_simple
+ run_simple(host, 0, app, use_reloader=False)
+ except Exception:
+ import sys
+ print(_color("Werkzeug not installed; falling back to wsgiref (no reloader).", _YELLOW), file=sys.stderr)
+ from wsgiref.simple_server import make_server
+ httpd = make_server(host, 0, app)
+ port = httpd.server_port
+ print(_color(f"Serving on http://{host}:{port}", _GREEN))
+ httpd.serve_forever()
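
A minimal standalone sketch of the static-first-then-app WSGI delegation that the vc_init_dev_wsgi.py template above implements. The hello_app handler and the public/ directory are hypothetical stand-ins; the containment check is simplified (the template also verifies realpath containment), and the ephemeral-port bind plus printed address mirror what the shim's __main__ runner does so a caller can detect the port from the log line.

# Sketch: serve files from public/ first, otherwise delegate to the wrapped app.
import mimetypes
from os import path as _p
from wsgiref.simple_server import make_server

PUBLIC_DIR = "public"  # hypothetical static directory

def hello_app(environ, start_response):
    # Stand-in for the user's WSGI app (e.g. a Flask instance named `app`).
    start_response("200 OK", [("Content-Type", "text/plain; charset=utf-8")])
    return [b"Hello from the user app"]

def static_first(user_app):
    # Wrap a WSGI app so GET/HEAD requests are checked against PUBLIC_DIR first.
    def wrapper(environ, start_response):
        rel = _p.normpath(environ.get("PATH_INFO", "/") or "/").lstrip("/")
        full = _p.join(PUBLIC_DIR, rel)
        if environ.get("REQUEST_METHOD", "GET") in ("GET", "HEAD") and _p.isfile(full):
            ctype, _ = mimetypes.guess_type(full)
            with open(full, "rb") as f:
                data = f.read()
            start_response("200 OK", [
                ("Content-Type", ctype or "application/octet-stream"),
                ("Content-Length", str(len(data))),
            ])
            return [] if environ.get("REQUEST_METHOD") == "HEAD" else [data]
        # Static miss: delegate to the wrapped user app.
        return user_app(environ, start_response)
    return wrapper

if __name__ == "__main__":
    # Bind to an ephemeral port and print the bound address, as the dev shim does.
    httpd = make_server("127.0.0.1", 0, static_first(hello_app))
    print(f"Serving on http://127.0.0.1:{httpd.server_port}")
    httpd.serve_forever()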