@vercel/python 5.0.6 → 5.0.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +94 -44
- package/package.json +1 -1
- package/vc_init.py +139 -72
package/dist/index.js
CHANGED
@@ -2836,16 +2836,10 @@ async function getUserScriptsDir(pythonPath) {
     return null;
   }
 }
-async function pipInstall(pipPath, pythonPath, workPath, args, targetDir) {
+async function pipInstall(pipPath, uvPath, workPath, args, targetDir) {
   const target = targetDir ? (0, import_path.join)(targetDir, resolveVendorDir()) : resolveVendorDir();
   process.env.PIP_USER = "0";
-  let uvBin;
-  try {
-    uvBin = await getUvBinaryOrInstall(pythonPath);
-  } catch (err) {
-    console.log("Failed to install uv, falling back to pip");
-  }
-  if (uvBin) {
+  if (uvPath) {
    const uvArgs = [
      "pip",
      "install",
@@ -2855,10 +2849,10 @@ async function pipInstall(pipPath, pythonPath, workPath, args, targetDir) {
      target,
      ...args
    ];
-    const prettyUv = `${uvBin} ${uvArgs.join(" ")}`;
+    const prettyUv = `${uvPath} ${uvArgs.join(" ")}`;
    (0, import_build_utils.debug)(`Running "${prettyUv}"...`);
    try {
-      await (0, import_execa.default)(uvBin, uvArgs, {
+      await (0, import_execa.default)(uvPath, uvArgs, {
        cwd: workPath
      });
      return;
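Example (not part of the package): a minimal TypeScript sketch of the flow the two hunks above establish. The caller now resolves the uv binary once and passes it in as uvPath; when it is present, dependencies are installed through uv's pip-compatible interface, otherwise the function falls back to plain pip. The vendor directory name and the exact pip flags below are assumptions.

import { execFile } from "node:child_process";
import { promisify } from "node:util";
import { join } from "node:path";

const run = promisify(execFile);

// Mirrors the new signature pipInstall(pipPath, uvPath, workPath, args, targetDir).
async function pipInstallSketch(
  pipPath: string,
  uvPath: string | null,
  workPath: string,
  args: string[],
  targetDir?: string
): Promise<void> {
  const target = targetDir ? join(targetDir, "_vendor") : "_vendor"; // vendor dir name is assumed
  if (uvPath) {
    try {
      // uv's pip-compatible interface: uv pip install --target <dir> ...
      await run(uvPath, ["pip", "install", "--target", target, ...args], { cwd: workPath });
      return;
    } catch {
      // If uv fails at install time, fall through to plain pip below.
    }
  }
  await run(pipPath, ["install", "--target", target, ...args], { cwd: workPath });
}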
@@ -2932,10 +2926,8 @@ async function maybeFindUvBin(pythonPath) {
 }
 async function getUvBinaryOrInstall(pythonPath) {
   const uvBin = await maybeFindUvBin(pythonPath);
-  if (uvBin) {
-    console.log(`Using uv at "${uvBin}"`);
+  if (uvBin)
     return uvBin;
-  }
   try {
     console.log("Installing uv...");
     await (0, import_execa.default)(
@@ -2966,6 +2958,7 @@ async function getUvBinaryOrInstall(pythonPath) {
 async function installRequirement({
   pythonPath,
   pipPath,
+  uvPath,
   dependency,
   version: version2,
   workPath,
@@ -2981,11 +2974,12 @@ async function installRequirement({
    return;
  }
  const exact = `${dependency}==${version2}`;
-  await pipInstall(pipPath, pythonPath, workPath, [exact, ...args], targetDir);
+  await pipInstall(pipPath, uvPath, workPath, [exact, ...args], targetDir);
 }
 async function installRequirementsFile({
   pythonPath,
   pipPath,
+  uvPath,
   filePath,
   workPath,
   targetDir,
@@ -2999,27 +2993,29 @@ async function installRequirementsFile({
  }
  await pipInstall(
    pipPath,
-    pythonPath,
+    uvPath,
    workPath,
    ["--upgrade", "-r", filePath, ...args],
    targetDir
  );
 }
-async function exportRequirementsFromUv(pythonPath, projectDir, options = {}) {
+async function exportRequirementsFromUv(projectDir, uvPath, options = {}) {
  const { locked = false } = options;
-  const uvBin = await getUvBinaryOrInstall(pythonPath);
+  if (!uvPath) {
+    throw new Error("uv is not available to export requirements");
+  }
  const args = ["export"];
  if (locked) {
    args.push("--frozen");
  }
-  (0, import_build_utils.debug)(`Running "${uvBin} ${args.join(" ")}" in ${projectDir}...`);
+  (0, import_build_utils.debug)(`Running "${uvPath} ${args.join(" ")}" in ${projectDir}...`);
  let stdout;
  try {
-    const { stdout: out } = await (0, import_execa.default)(uvBin, args, { cwd: projectDir });
+    const { stdout: out } = await (0, import_execa.default)(uvPath, args, { cwd: projectDir });
    stdout = out;
  } catch (err) {
    throw new Error(
-      `Failed to run "${uvBin} ${args.join(" ")}": ${err instanceof Error ? err.message : String(err)}`
+      `Failed to run "${uvPath} ${args.join(" ")}": ${err instanceof Error ? err.message : String(err)}`
    );
  }
  const tmpDir = await import_fs.default.promises.mkdtemp((0, import_path.join)(import_os.default.tmpdir(), "vercel-uv-"));
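Example (not part of the package): a rough TypeScript sketch of the export step performed by exportRequirementsFromUv as changed above. It runs uv export (optionally with --frozen), captures stdout, and writes it to a temporary requirements file for the requirements installer; the temp file name below is an assumption.

import { execFile } from "node:child_process";
import { promisify } from "node:util";
import { mkdtemp, writeFile } from "node:fs/promises";
import { tmpdir } from "node:os";
import { join } from "node:path";

const run = promisify(execFile);

// `uv export` prints a requirements.txt-style listing to stdout;
// `--frozen` makes it fail instead of re-resolving when uv.lock is stale.
async function exportRequirementsSketch(
  projectDir: string,
  uvPath: string,
  locked: boolean
): Promise<string> {
  const args = ["export"];
  if (locked) args.push("--frozen");
  const { stdout } = await run(uvPath, args, { cwd: projectDir });
  const dir = await mkdtemp(join(tmpdir(), "vercel-uv-"));
  const reqPath = join(dir, "requirements.txt"); // file name assumed
  await writeFile(reqPath, stdout, "utf8");
  return reqPath; // later fed to the requirements-file installer
}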
@@ -3031,6 +3027,7 @@ async function exportRequirementsFromUv(pythonPath, projectDir, options = {}) {
 async function exportRequirementsFromPipfile({
   pythonPath,
   pipPath,
+  uvPath,
   projectDir,
   meta
 }) {
@@ -3044,7 +3041,8 @@ async function exportRequirementsFromPipfile({
    version: "0.3.0",
    workPath: tempDir,
    meta,
-    args: ["--no-warn-script-location"]
+    args: ["--no-warn-script-location"],
+    uvPath
  });
  const tempVendorDir = (0, import_path.join)(tempDir, resolveVendorDir());
  const convertCmd = isWin ? (0, import_path.join)(tempVendorDir, "Scripts", "pipfile2req.exe") : (0, import_path.join)(tempVendorDir, "bin", "pipfile2req");
@@ -3151,16 +3149,27 @@ function getSupportedPythonVersion({
      }
      if (isInstalled2(requested)) {
        selection = requested;
+        console.log(`Using Python ${selection.version} from ${source}`);
      } else {
        console.warn(
          `Warning: Python version "${version2}" detected in ${source} is not installed and will be ignored. http://vercel.link/python-version`
        );
+        console.log(
+          `Falling back to latest installed version: ${selection.version}`
+        );
      }
    } else {
      console.warn(
        `Warning: Python version "${version2}" detected in ${source} is invalid and will be ignored. http://vercel.link/python-version`
      );
+      console.log(
+        `Falling back to latest installed version: ${selection.version}`
+      );
    }
+  } else {
+    console.log(
+      `No Python version specified in pyproject.toml or Pipfile.lock. Using latest installed version: ${selection.version}`
+    );
  }
  if (isDiscontinued(selection)) {
    throw new import_build_utils2.NowBuildError({
@@ -3606,7 +3615,6 @@ var build = async ({
   meta = {},
   config
 }) => {
-  let pythonVersion = getLatestPythonVersion(meta);
   workPath = await downloadFilesInWorkPath({
     workPath,
     files: originalFiles,
@@ -3655,22 +3663,26 @@ var build = async ({
  });
  const pipfileLockDir = fsFiles[(0, import_path5.join)(entryDirectory, "Pipfile.lock")] ? (0, import_path5.join)(workPath, entryDirectory) : fsFiles["Pipfile.lock"] ? workPath : null;
  const pipfileDir = fsFiles[(0, import_path5.join)(entryDirectory, "Pipfile")] ? (0, import_path5.join)(workPath, entryDirectory) : fsFiles["Pipfile"] ? workPath : null;
+  let declaredPythonVersion;
  if (pyprojectDir) {
    let requiresPython;
    try {
      const pyproject = await (0, import_build_utils6.readConfigFile)((0, import_path5.join)(pyprojectDir, "pyproject.toml"));
      requiresPython = pyproject?.project?.["requires-python"];
-    } catch {
-      (0, import_build_utils5.debug)("Failed to parse pyproject.toml");
+    } catch (err) {
+      (0, import_build_utils5.debug)("Failed to parse pyproject.toml", err);
    }
    const VERSION_REGEX = /\b\d+\.\d+\b/;
    const exact = requiresPython?.trim().match(VERSION_REGEX)?.[0];
    if (exact) {
-
-
-
-
-
+      declaredPythonVersion = { version: exact, source: "pyproject.toml" };
+      (0, import_build_utils5.debug)(
+        `Found Python version ${exact} in pyproject.toml (requires-python: "${requiresPython}")`
+      );
+    } else if (requiresPython) {
+      (0, import_build_utils5.debug)(
+        `Could not parse Python version from pyproject.toml requires-python: "${requiresPython}"`
+      );
    }
  } else if (pipfileLockDir) {
    let lock = {};
@@ -3684,11 +3696,15 @@ var build = async ({
      });
    }
    const pyFromLock = lock?._meta?.requires?.python_version;
-
-
-
-    }
+    if (pyFromLock) {
+      declaredPythonVersion = { version: pyFromLock, source: "Pipfile.lock" };
+      (0, import_build_utils5.debug)(`Found Python version ${pyFromLock} in Pipfile.lock`);
+    }
  }
+  const pythonVersion = getSupportedPythonVersion({
+    isDev: meta.isDev,
+    declaredPythonVersion
+  });
  fsFiles = await (0, import_build_utils5.glob)("**", workPath);
  const requirementsTxt = (0, import_path5.join)(entryDirectory, "requirements.txt");
  const vendorBaseDir = (0, import_path5.join)(
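Example (not part of the package): a simplified TypeScript sketch of how the build now derives declaredPythonVersion as a { version, source } pair from pyproject.toml's requires-python or from Pipfile.lock, before handing it to getSupportedPythonVersion. The real builder reads these files with its own helpers; the parsing here is reduced to what the hunks show.

type DeclaredPythonVersion = { version: string; source: "pyproject.toml" | "Pipfile.lock" };

const VERSION_REGEX = /\b\d+\.\d+\b/;

function fromRequiresPython(requiresPython?: string): DeclaredPythonVersion | undefined {
  // e.g. ">=3.11,<3.13" yields "3.11"
  const exact = requiresPython?.trim().match(VERSION_REGEX)?.[0];
  return exact ? { version: exact, source: "pyproject.toml" } : undefined;
}

function fromPipfileLock(lock: any): DeclaredPythonVersion | undefined {
  const pyFromLock = lock?._meta?.requires?.python_version;
  return pyFromLock ? { version: pyFromLock, source: "Pipfile.lock" } : undefined;
}

// The result (possibly undefined) is passed to getSupportedPythonVersion, which
// prints the fallback messages added in the earlier hunk when the declared
// version is missing, invalid, or not installed.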
@@ -3704,10 +3720,42 @@ var build = async ({
    console.log("Failed to create vendor cache directory");
    throw err;
  }
-  console.log("Installing required dependencies...");
+  let installationSource;
+  if (uvLockDir && pyprojectDir) {
+    installationSource = "uv.lock";
+  } else if (pyprojectDir) {
+    installationSource = "pyproject.toml";
+  } else if (pipfileLockDir) {
+    installationSource = "Pipfile.lock";
+  } else if (pipfileDir) {
+    installationSource = "Pipfile";
+  } else if (fsFiles[requirementsTxt] || fsFiles["requirements.txt"]) {
+    installationSource = "requirements.txt";
+  }
+  if (installationSource) {
+    console.log(
+      `Installing required dependencies from ${installationSource}...`
+    );
+  } else {
+    console.log("Installing required dependencies...");
+  }
+  let uvPath = null;
+  try {
+    uvPath = await getUvBinaryOrInstall(pythonVersion.pythonPath);
+    console.log(`Using uv at "${uvPath}"`);
+  } catch (err) {
+    if (uvLockDir || pyprojectDir && !hasReqLocal && !hasReqGlobal) {
+      console.log("Failed to install uv");
+      throw new Error(
+        `uv is required for this project but failed to install: ${err instanceof Error ? err.message : String(err)}`
+      );
+    }
+    (0, import_build_utils5.debug)("Failed to install uv", err);
+  }
  await installRequirement({
    pythonPath: pythonVersion.pythonPath,
    pipPath: pythonVersion.pipPath,
+    uvPath,
    dependency: "werkzeug",
    version: "1.0.1",
    workPath,
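Example (not part of the package): a condensed TypeScript sketch of the decision logic added above. The dependency source is logged once per build, and a uv install failure is only fatal when the project cannot be built without uv; requirementsTxt below stands in for the builder's hasReqLocal/hasReqGlobal checks.

type InstallationSource =
  | "uv.lock"
  | "pyproject.toml"
  | "Pipfile.lock"
  | "Pipfile"
  | "requirements.txt";

function pickInstallationSource(found: {
  uvLock: boolean;
  pyproject: boolean;
  pipfileLock: boolean;
  pipfile: boolean;
  requirementsTxt: boolean;
}): InstallationSource | undefined {
  if (found.uvLock && found.pyproject) return "uv.lock";
  if (found.pyproject) return "pyproject.toml";
  if (found.pipfileLock) return "Pipfile.lock";
  if (found.pipfile) return "Pipfile";
  if (found.requirementsTxt) return "requirements.txt";
  return undefined;
}

// uv is only a hard requirement when the project must be exported with uv:
// a uv.lock, or a pyproject.toml with no requirements.txt to fall back to.
function uvIsRequired(found: { uvLock: boolean; pyproject: boolean; requirementsTxt: boolean }): boolean {
  return found.uvLock || (found.pyproject && !found.requirementsTxt);
}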
@@ -3718,14 +3766,13 @@ var build = async ({
  if (uvLockDir) {
    (0, import_build_utils5.debug)('Found "uv.lock"');
    if (pyprojectDir) {
-      const exportedReq = await exportRequirementsFromUv(
-        pythonVersion.pythonPath,
-        pyprojectDir,
-        { locked: true }
-      );
+      const exportedReq = await exportRequirementsFromUv(pyprojectDir, uvPath, {
+        locked: true
+      });
      await installRequirementsFile({
        pythonPath: pythonVersion.pythonPath,
        pipPath: pythonVersion.pipPath,
+        uvPath,
        filePath: exportedReq,
        workPath,
        targetDir: vendorBaseDir,
@@ -3742,14 +3789,13 @@ var build = async ({
        "Detected both pyproject.toml and requirements.txt but no lockfile; using pyproject.toml"
      );
    }
-    const exportedReq = await exportRequirementsFromUv(
-      pythonVersion.pythonPath,
-      pyprojectDir,
-      { locked: false }
-    );
+    const exportedReq = await exportRequirementsFromUv(pyprojectDir, uvPath, {
+      locked: false
+    });
    await installRequirementsFile({
      pythonPath: pythonVersion.pythonPath,
      pipPath: pythonVersion.pipPath,
+      uvPath,
      filePath: exportedReq,
      workPath,
      targetDir: vendorBaseDir,
@@ -3764,12 +3810,14 @@ var build = async ({
    const exportedReq = await exportRequirementsFromPipfile({
      pythonPath: pythonVersion.pythonPath,
      pipPath: pythonVersion.pipPath,
+      uvPath,
      projectDir: pipfileLockDir || pipfileDir,
      meta
    });
    await installRequirementsFile({
      pythonPath: pythonVersion.pythonPath,
      pipPath: pythonVersion.pipPath,
+      uvPath,
      filePath: exportedReq,
      workPath,
      targetDir: vendorBaseDir,
@@ -3784,6 +3832,7 @@ var build = async ({
    await installRequirementsFile({
      pythonPath: pythonVersion.pythonPath,
      pipPath: pythonVersion.pipPath,
+      uvPath,
      filePath: requirementsTxtPath,
      workPath,
      targetDir: vendorBaseDir,
@@ -3795,6 +3844,7 @@ var build = async ({
    await installRequirementsFile({
      pythonPath: pythonVersion.pythonPath,
      pipPath: pythonVersion.pipPath,
+      uvPath,
      filePath: requirementsTxtPath,
      workPath,
      targetDir: vendorBaseDir,
package/package.json
CHANGED
package/vc_init.py
CHANGED
@@ -1,3 +1,4 @@
+from __future__ import annotations
 import sys
 import os
 import site
@@ -7,9 +8,17 @@ import json
 import inspect
 import threading
 import asyncio
+import http
+import time
 from importlib import util
-from http.server import BaseHTTPRequestHandler
+from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer
 import socket
+import functools
+import logging
+import builtins
+from typing import Callable, Literal
+import contextvars
+import io
 
 _here = os.path.dirname(__file__)
 _vendor_rel = '__VC_HANDLER_VENDOR_DIR'
@@ -53,72 +62,69 @@ def format_headers(headers, decode=False):
         keyToList[key].append(value)
     return keyToList
 
-
-
-
-
-
-
-import builtins
-import logging
+# Custom logging handler so logs are properly categorized
+class VCLogHandler(logging.Handler):
+    def __init__(self, send_message: Callable[[dict], None], context_getter: Callable[[], dict] | None = None):
+        super().__init__()
+        self._send_message = send_message
+        self._context_getter = context_getter
 
-
-
-
+    def emit(self, record):
+        try:
+            message = record.getMessage()
+        except Exception:
+            try:
+                message = f"{record.msg}"
+            except Exception:
+                message = ""
+
+        if record.levelno >= logging.CRITICAL:
+            level = "fatal"
+        elif record.levelno >= logging.ERROR:
+            level = "error"
+        elif record.levelno >= logging.WARNING:
+            level = "warn"
+        elif record.levelno >= logging.INFO:
+            level = "info"
+        else:
+            level = "debug"
 
-
-
+        ctx = None
+        try:
+            ctx = self._context_getter() if self._context_getter is not None else None
+        except Exception:
+            ctx = None
 
-
-
-
-
+        if ctx is not None:
+            try:
+                self._send_message({
+                    "type": "log",
+                    "payload": {
+                        "context": {
+                            "invocationId": ctx['invocationId'],
+                            "requestId": ctx['requestId'],
+                        },
+                        "message": base64.b64encode(message.encode()).decode(),
+                        "level": level,
+                    }
+                })
+            except Exception:
+                pass
+        else:
+            try:
+                sys.stdout.write(message + "\n")
+            except Exception:
+                pass
 
-        def timed_request(func):
-            fetchId = 0
-            @functools.wraps(func)
-            def wrapper(self, method, url, *args, **kwargs):
-                nonlocal fetchId
-                fetchId += 1
-                start_time = int(time.time() * 1000)
-                result = func(self, method, url, *args, **kwargs)
-                elapsed_time = int(time.time() * 1000) - start_time
-                parsed_url = urlparse(url)
-                context = storage.get()
-                if context is not None:
-                    send_message({
-                        "type": "metric",
-                        "payload": {
-                            "context": {
-                                "invocationId": context['invocationId'],
-                                "requestId": context['requestId'],
-                            },
-                            "type": "fetch-metric",
-                            "payload": {
-                                "pathname": parsed_url.path,
-                                "search": parsed_url.query,
-                                "start": start_time,
-                                "duration": elapsed_time,
-                                "host": parsed_url.hostname or self.host,
-                                "statusCode": result.status,
-                                "method": method,
-                                "id": fetchId
-                            }
-                        }
-                    })
-                return result
-            return wrapper
-        urllib3.connectionpool.HTTPConnectionPool.urlopen = timed_request(urllib3.connectionpool.HTTPConnectionPool.urlopen)
-    except:
-        pass
 
+def setup_logging(send_message: Callable[[dict], None], storage: contextvars.ContextVar[dict | None]):
     # Override sys.stdout and sys.stderr to map logs to the correct request
     class StreamWrapper:
-        def __init__(self, stream, stream_name):
+        def __init__(self, stream: io.TextIOBase, stream_name: Literal["stdout", "stderr"]):
             self.stream = stream
             self.stream_name = stream_name
 
-        def write(self, message):
+        def write(self, message: str):
             context = storage.get()
             if context is not None:
                 send_message({
@@ -141,19 +147,15 @@ if 'VERCEL_IPC_PATH' in os.environ:
     sys.stdout = StreamWrapper(sys.stdout, "stdout")
     sys.stderr = StreamWrapper(sys.stderr, "stderr")
 
-    #
-
-
-        def wrapper(*args, **kwargs):
-            sys.stdout.write(' '.join(map(str, args)) + '\n')
-        return wrapper
-    builtins.print = print_wrapper(builtins.print)
-
-    # Override logging to maps logs to the correct request
-    def logging_wrapper(func, level="info"):
+    # Wrap top-level logging helpers to emit structured logs when a request
+    # context is available; otherwise fall back to the original behavior.
+    def logging_wrapper(func: Callable[..., None], level: str = "info") -> Callable[..., None]:
         @functools.wraps(func)
         def wrapper(*args, **kwargs):
-            context = storage.get()
+            try:
+                context = storage.get()
+            except Exception:
+                context = None
             if context is not None:
                 send_message({
                     "type": "log",
@@ -170,12 +172,77 @@ if 'VERCEL_IPC_PATH' in os.environ:
             func(*args, **kwargs)
         return wrapper
 
-    logging.basicConfig(level=logging.INFO)
-    logging.debug = logging_wrapper(logging.debug)
-    logging.info = logging_wrapper(logging.info)
+    logging.basicConfig(level=logging.INFO, handlers=[VCLogHandler(send_message, storage.get)], force=True)
+    logging.debug = logging_wrapper(logging.debug, "debug")
+    logging.info = logging_wrapper(logging.info, "info")
     logging.warning = logging_wrapper(logging.warning, "warn")
     logging.error = logging_wrapper(logging.error, "error")
-    logging.
+    logging.fatal = logging_wrapper(logging.fatal, "fatal")
+    logging.critical = logging_wrapper(logging.critical, "fatal")
+
+    # Ensure built-in print funnels through stdout wrapper so prints are
+    # attributed to the current request context.
+    def print_wrapper(func: Callable[..., None]) -> Callable[..., None]:
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            sys.stdout.write(' '.join(map(str, args)) + '\n')
+        return wrapper
+
+    builtins.print = print_wrapper(builtins.print)
+
+
+if 'VERCEL_IPC_PATH' in os.environ:
+    start_time = time.time()
+    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+    sock.connect(os.getenv("VERCEL_IPC_PATH", ""))
+
+    send_message = lambda message: sock.sendall((json.dumps(message) + '\0').encode())
+    storage = contextvars.ContextVar('storage', default=None)
+
+    # Override urlopen from urllib3 (& requests) to send Request Metrics
+    try:
+        import urllib3
+        from urllib.parse import urlparse
+
+        def timed_request(func):
+            fetchId = 0
+            @functools.wraps(func)
+            def wrapper(self, method, url, *args, **kwargs):
+                nonlocal fetchId
+                fetchId += 1
+                start_time = int(time.time() * 1000)
+                result = func(self, method, url, *args, **kwargs)
+                elapsed_time = int(time.time() * 1000) - start_time
+                parsed_url = urlparse(url)
+                context = storage.get()
+                if context is not None:
+                    send_message({
+                        "type": "metric",
+                        "payload": {
+                            "context": {
+                                "invocationId": context['invocationId'],
+                                "requestId": context['requestId'],
+                            },
+                            "type": "fetch-metric",
+                            "payload": {
+                                "pathname": parsed_url.path,
+                                "search": parsed_url.query,
+                                "start": start_time,
+                                "duration": elapsed_time,
+                                "host": parsed_url.hostname or self.host,
+                                "statusCode": result.status,
+                                "method": method,
+                                "id": fetchId
+                            }
+                        }
+                    })
+                return result
+            return wrapper
+        urllib3.connectionpool.HTTPConnectionPool.urlopen = timed_request(urllib3.connectionpool.HTTPConnectionPool.urlopen)
+    except:
+        pass
+
+    setup_logging(send_message, storage)
 
 class BaseHandler(BaseHTTPRequestHandler):
     # Re-implementation of BaseHTTPRequestHandler's log_message method to
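Example (not part of the package): the stream/print/logging wrappers and the urllib3 patch above write NUL-terminated JSON envelopes to the VERCEL_IPC_PATH socket. Their shapes, transcribed as TypeScript types for readability; the field names come from this diff, while the wire contract itself is internal and may differ.

type LogMessage = {
  type: "log";
  payload: {
    context: { invocationId: string; requestId: string };
    message: string; // base64-encoded log line
    level: "debug" | "info" | "warn" | "error" | "fatal";
  };
};

type FetchMetricMessage = {
  type: "metric";
  payload: {
    context: { invocationId: string; requestId: string };
    type: "fetch-metric";
    payload: {
      pathname: string;
      search: string;
      start: number;    // ms since epoch
      duration: number; // ms
      host: string;
      statusCode: number;
      method: string;
      id: number;
    };
  };
};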