@vercel/python 5.0.1 → 5.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +115 -53
- package/package.json +1 -1
- package/vc_init.py +26 -2
package/dist/index.js
CHANGED
@@ -2651,7 +2651,7 @@ ${stderr}${stdout}`;
 var require_lib = __commonJS({
 "../../node_modules/.pnpm/which@3.0.0/node_modules/which/lib/index.js"(exports, module2) {
 var isexe = require_isexe();
-var { join:
+var { join: join3, delimiter, sep, posix } = require("path");
 var isWindows = process.platform === "win32";
 var rSlash = new RegExp(`[${posix.sep}${sep === posix.sep ? "" : sep}]`.replace(/(\\)/g, "\\$1"));
 var rRel = new RegExp(`^\\.${rSlash.source}`);
@@ -2680,7 +2680,7 @@ var require_lib = __commonJS({
 var getPathPart = (raw, cmd) => {
 const pathPart = /^".*"$/.test(raw) ? raw.slice(1, -1) : raw;
 const prefix = !pathPart && rRel.test(cmd) ? cmd.slice(0, 2) : "";
-return prefix +
+return prefix + join3(pathPart, cmd);
 };
 var which2 = async (cmd, opt = {}) => {
 const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt);
@@ -2749,11 +2749,12 @@ module.exports = __toCommonJS(src_exports);
 var import_fs = __toESM(require("fs"));
 var import_execa2 = __toESM(require_execa());
 var import_util = require("util");
-var
+var import_path2 = require("path");
 var import_build_utils3 = require("@vercel/build-utils");

 // src/install.ts
 var import_execa = __toESM(require_execa());
+var import_path = require("path");
 var import_build_utils = require("@vercel/build-utils");
 var makeDependencyCheckCode = (dependency) => `
 from importlib import util
@@ -2768,7 +2769,8 @@ async function isInstalled(pythonPath, dependency, cwd) {
 ["-c", makeDependencyCheckCode(dependency)],
 {
 stdio: "pipe",
-cwd
+cwd,
+env: { ...process.env, PYTHONPATH: (0, import_path.join)(cwd, resolveVendorDir()) }
 }
 );
 return stdout.startsWith(cwd);
@@ -2790,7 +2792,8 @@ async function areRequirementsInstalled(pythonPath, requirementsPath, cwd) {
 ["-c", makeRequirementsCheckCode(requirementsPath)],
 {
 stdio: "pipe",
-cwd
+cwd,
+env: { ...process.env, PYTHONPATH: (0, import_path.join)(cwd, resolveVendorDir()) }
 }
 );
 return true;
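
Note on the two hunks above: both isInstalled and areRequirementsInstalled now run the Python probe with PYTHONPATH pointing at the vendor directory inside cwd, so packages installed with pip --target are visible to the check. A rough Python sketch of the same probe, under the assumption that the generated check code prints where the dependency resolves from (the helper and names here are illustrative, not part of the package):

    import os
    import subprocess
    import sys

    def is_installed(dep: str, cwd: str, vendor_dir: str = "_vendor") -> bool:
        # Ask a fresh interpreter where `dep` resolves from, with the vendor
        # directory on PYTHONPATH, and accept it only if it lives inside cwd.
        check = (
            "from importlib import util\n"
            f"spec = util.find_spec({dep!r})\n"
            "print(spec.origin if spec else '')"
        )
        env = {**os.environ, "PYTHONPATH": os.path.join(cwd, vendor_dir)}
        proc = subprocess.run([sys.executable, "-c", check],
                              cwd=cwd, env=env, capture_output=True, text=True)
        return proc.stdout.strip().startswith(cwd)
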
@@ -2798,13 +2801,18 @@ async function areRequirementsInstalled(pythonPath, requirementsPath, cwd) {
 return false;
 }
 }
+function resolveVendorDir() {
+const vendorDir = process.env.VERCEL_PYTHON_VENDOR_DIR || "_vendor";
+return vendorDir;
+}
 async function pipInstall(pipPath, workPath, args) {
-const target =
+const target = resolveVendorDir();
 process.env.PIP_USER = "0";
 const cmdArgs = [
 "install",
 "--disable-pip-version-check",
 "--no-compile",
+"--no-cache-dir",
 "--target",
 target,
 ...args
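
resolveVendorDir defaults to `_vendor` but can be overridden via VERCEL_PYTHON_VENDOR_DIR, and pipInstall now vendors packages into that folder with `--target` plus `--no-cache-dir`. A rough Python equivalent of the resulting pip invocation (a sketch only; the real builder drives pip from Node):

    import os
    import subprocess

    def pip_install(pip_path: str, work_path: str, *extra_args: str) -> None:
        target = os.environ.get("VERCEL_PYTHON_VENDOR_DIR", "_vendor")
        cmd = [
            pip_path, "install",
            "--disable-pip-version-check",
            "--no-compile",
            "--no-cache-dir",      # do not leave a pip wheel/HTTP cache in the build
            "--target", target,    # vendor into <work_path>/_vendor, not site-packages
            *extra_args,
        ]
        env = {**os.environ, "PIP_USER": "0"}  # keep pip from falling back to --user installs
        subprocess.run(cmd, cwd=work_path, env=env, check=True)
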
@@ -2959,14 +2967,15 @@ function isInstalled2({ pipPath, pythonPath }) {
 // src/index.ts
 var readFile = (0, import_util.promisify)(import_fs.default.readFile);
 var writeFile = (0, import_util.promisify)(import_fs.default.writeFile);
-async function pipenvConvert(cmd, srcDir) {
+async function pipenvConvert(cmd, srcDir, env) {
 (0, import_build_utils3.debug)("Running pipfile2req...");
 try {
 const out = await import_execa2.default.stdout(cmd, [], {
-cwd: srcDir
+cwd: srcDir,
+env
 });
 (0, import_build_utils3.debug)("Contents of requirements.txt is: " + out);
-import_fs.default.writeFileSync((0,
+import_fs.default.writeFileSync((0, import_path2.join)(srcDir, "requirements.txt"), out);
 } catch (err) {
 console.log('Failed to run "pipfile2req"');
 throw err;
@@ -2982,8 +2991,8 @@ async function downloadFilesInWorkPath({
 (0, import_build_utils3.debug)("Downloading user files...");
 let downloadedFiles = await (0, import_build_utils3.download)(files, workPath, meta);
 if (meta.isDev) {
-const { devCacheDir = (0,
-const destCache = (0,
+const { devCacheDir = (0, import_path2.join)(workPath, ".now", "cache") } = meta;
+const destCache = (0, import_path2.join)(devCacheDir, (0, import_path2.basename)(entrypoint, ".py"));
 await (0, import_build_utils3.download)(downloadedFiles, destCache);
 downloadedFiles = await (0, import_build_utils3.glob)("**", destCache);
 workPath = destCache;
@@ -3006,57 +3015,94 @@ var build = async ({
 });
 try {
 if (meta.isDev) {
-const setupCfg = (0,
+const setupCfg = (0, import_path2.join)(workPath, "setup.cfg");
 await writeFile(setupCfg, "[install]\nprefix=\n");
 }
 } catch (err) {
 console.log('Failed to create "setup.cfg" file');
 throw err;
 }
+let fsFiles = await (0, import_build_utils3.glob)("**", workPath);
+const entryDirectory = (0, import_path2.dirname)(entrypoint);
+const hasReqLocal = !!fsFiles[(0, import_path2.join)(entryDirectory, "requirements.txt")];
+const hasReqGlobal = !!fsFiles["requirements.txt"];
+const pipfileLockDir = fsFiles[(0, import_path2.join)(entryDirectory, "Pipfile.lock")] ? (0, import_path2.join)(workPath, entryDirectory) : fsFiles["Pipfile.lock"] ? workPath : null;
+const pipfileDir = fsFiles[(0, import_path2.join)(entryDirectory, "Pipfile")] ? (0, import_path2.join)(workPath, entryDirectory) : fsFiles["Pipfile"] ? workPath : null;
+if (!hasReqLocal && !hasReqGlobal && (pipfileLockDir || pipfileDir)) {
+if (pipfileLockDir) {
+(0, import_build_utils3.debug)('Found "Pipfile.lock"');
+} else {
+(0, import_build_utils3.debug)('Found "Pipfile"');
+}
+if (pipfileLockDir) {
+let lock = {};
+try {
+const json = await readFile(
+(0, import_path2.join)(pipfileLockDir, "Pipfile.lock"),
+"utf8"
+);
+lock = JSON.parse(json);
+} catch (err) {
+throw new import_build_utils3.NowBuildError({
+code: "INVALID_PIPFILE_LOCK",
+message: "Unable to parse Pipfile.lock"
+});
+}
+pythonVersion = getSupportedPythonVersion({
+isDev: meta.isDev,
+pipLockPythonVersion: lock?._meta?.requires?.python_version
+});
+}
+if (!hasReqLocal && !hasReqGlobal) {
+const tempDir = await (0, import_build_utils3.getWriteableDirectory)();
+await installRequirement({
+pythonPath: pythonVersion.pythonPath,
+pipPath: pythonVersion.pipPath,
+dependency: "pipfile-requirements",
+version: "0.3.0",
+workPath: tempDir,
+meta,
+args: ["--no-warn-script-location"]
+});
+const tempVendorDir = (0, import_path2.join)(tempDir, resolveVendorDir());
+const envForConvert = { ...process.env, PYTHONPATH: tempVendorDir };
+const convertCmd = process.platform === "win32" ? (0, import_path2.join)(tempVendorDir, "Scripts", "pipfile2req.exe") : (0, import_path2.join)(tempVendorDir, "bin", "pipfile2req");
+await pipenvConvert(
+convertCmd,
+pipfileLockDir || pipfileDir,
+envForConvert
+);
+} else {
+(0, import_build_utils3.debug)(
+'Skipping Pipfile.lock conversion because "requirements.txt" exists'
+);
+}
+}
+fsFiles = await (0, import_build_utils3.glob)("**", workPath);
+const requirementsTxt = (0, import_path2.join)(entryDirectory, "requirements.txt");
+const vendorBaseDir = (0, import_path2.join)(
+workPath,
+".vercel",
+"cache",
+"python",
+`py${pythonVersion.version}`,
+entryDirectory
+);
+try {
+await import_fs.default.promises.mkdir(vendorBaseDir, { recursive: true });
+} catch (err) {
+console.log("Failed to create vendor cache directory");
+throw err;
+}
 console.log("Installing required dependencies...");
 await installRequirement({
 pythonPath: pythonVersion.pythonPath,
 pipPath: pythonVersion.pipPath,
 dependency: "werkzeug",
 version: "1.0.1",
-workPath,
+workPath: vendorBaseDir,
 meta
 });
-let fsFiles = await (0, import_build_utils3.glob)("**", workPath);
-const entryDirectory = (0, import_path.dirname)(entrypoint);
-const pipfileLockDir = fsFiles[(0, import_path.join)(entryDirectory, "Pipfile.lock")] ? (0, import_path.join)(workPath, entryDirectory) : fsFiles["Pipfile.lock"] ? workPath : null;
-if (pipfileLockDir) {
-(0, import_build_utils3.debug)('Found "Pipfile.lock"');
-let lock = {};
-try {
-const json = await readFile((0, import_path.join)(pipfileLockDir, "Pipfile.lock"), "utf8");
-lock = JSON.parse(json);
-} catch (err) {
-throw new import_build_utils3.NowBuildError({
-code: "INVALID_PIPFILE_LOCK",
-message: "Unable to parse Pipfile.lock"
-});
-}
-pythonVersion = getSupportedPythonVersion({
-isDev: meta.isDev,
-pipLockPythonVersion: lock?._meta?.requires?.python_version
-});
-const tempDir = await (0, import_build_utils3.getWriteableDirectory)();
-await installRequirement({
-pythonPath: pythonVersion.pythonPath,
-pipPath: pythonVersion.pipPath,
-dependency: "pipfile-requirements",
-version: "0.3.0",
-workPath: tempDir,
-meta,
-args: ["--no-warn-script-location"]
-});
-process.env.PYTHONPATH = tempDir;
-const convertCmd = (0, import_path.join)(tempDir, "bin", "pipfile2req");
-await pipenvConvert(convertCmd, pipfileLockDir);
-}
-fsFiles = await (0, import_build_utils3.glob)("**", workPath);
-const requirementsTxt = (0, import_path.join)(entryDirectory, "requirements.txt");
 if (fsFiles[requirementsTxt]) {
 (0, import_build_utils3.debug)('Found local "requirements.txt"');
 const requirementsTxtPath = fsFiles[requirementsTxt].fsPath;
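
Two things change in the build flow above: Pipfile/Pipfile.lock are only converted when no requirements.txt exists, with pipfile2req executed out of a temporary vendor directory (Scripts/ on Windows, bin/ elsewhere) and PYTHONPATH pointing at it; and dependencies are now installed into a per-Python-version cache directory under .vercel/cache instead of directly into workPath. A hedged Python sketch of the conversion step only (helper names and paths are illustrative):

    import os
    import subprocess
    import sys

    def pipenv_convert(temp_dir: str, pipfile_dir: str, vendor_dir: str = "_vendor") -> None:
        temp_vendor = os.path.join(temp_dir, vendor_dir)
        # pip --target places console scripts under bin/ (Scripts/ on Windows)
        if sys.platform == "win32":
            cmd = os.path.join(temp_vendor, "Scripts", "pipfile2req.exe")
        else:
            cmd = os.path.join(temp_vendor, "bin", "pipfile2req")
        env = {**os.environ, "PYTHONPATH": temp_vendor}
        out = subprocess.run([cmd], cwd=pipfile_dir, env=env,
                             capture_output=True, text=True, check=True).stdout
        # Write the converted dependency list next to the Pipfile
        with open(os.path.join(pipfile_dir, "requirements.txt"), "w") as f:
            f.write(out)
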
@@ -3064,7 +3110,7 @@ var build = async ({
 pythonPath: pythonVersion.pythonPath,
 pipPath: pythonVersion.pipPath,
 filePath: requirementsTxtPath,
-workPath,
+workPath: vendorBaseDir,
 meta
 });
 } else if (fsFiles["requirements.txt"]) {
@@ -3074,20 +3120,22 @@ var build = async ({
 pythonPath: pythonVersion.pythonPath,
 pipPath: pythonVersion.pipPath,
 filePath: requirementsTxtPath,
-workPath,
+workPath: vendorBaseDir,
 meta
 });
 }
-const originalPyPath = (0,
+const originalPyPath = (0, import_path2.join)(__dirname, "..", "vc_init.py");
 const originalHandlerPyContents = await readFile(originalPyPath, "utf8");
 (0, import_build_utils3.debug)("Entrypoint is", entrypoint);
 const moduleName = entrypoint.replace(/\//g, ".").replace(/\.py$/, "");
+const vendorDir = resolveVendorDir();
 const suffix = meta.isDev && !entrypoint.endsWith(".py") ? ".py" : "";
 const entrypointWithSuffix = `${entrypoint}${suffix}`;
 (0, import_build_utils3.debug)("Entrypoint with suffix is", entrypointWithSuffix);
-const handlerPyContents = originalHandlerPyContents.replace(/__VC_HANDLER_MODULE_NAME/g, moduleName).replace(/__VC_HANDLER_ENTRYPOINT/g, entrypointWithSuffix);
+const handlerPyContents = originalHandlerPyContents.replace(/__VC_HANDLER_MODULE_NAME/g, moduleName).replace(/__VC_HANDLER_ENTRYPOINT/g, entrypointWithSuffix).replace(/__VC_HANDLER_VENDOR_DIR/g, vendorDir);
 const predefinedExcludes = [
 ".git/**",
+".gitignore",
 ".vercel/**",
 ".pnpm-store/**",
 "**/node_modules/**",
@@ -3097,11 +3145,25 @@ var build = async ({
 "**/venv/**",
 "**/__pycache__/**"
 ];
+const lambdaEnv = {};
+lambdaEnv.PYTHONPATH = vendorDir;
 const globOptions = {
 cwd: workPath,
 ignore: config && typeof config.excludeFiles === "string" ? [...predefinedExcludes, config.excludeFiles] : predefinedExcludes
 };
 const files = await (0, import_build_utils3.glob)("**", globOptions);
+try {
+const cachedVendorAbs = (0, import_path2.join)(vendorBaseDir, resolveVendorDir());
+if (import_fs.default.existsSync(cachedVendorAbs)) {
+const vendorFiles = await (0, import_build_utils3.glob)("**", cachedVendorAbs, resolveVendorDir());
+for (const [p, f] of Object.entries(vendorFiles)) {
+files[p] = f;
+}
+}
+} catch (err) {
+console.log("Failed to include cached vendor directory");
+throw err;
+}
 const handlerPyFilename = "vc__handler__python";
 files[`${handlerPyFilename}.py`] = new import_build_utils3.FileBlob({ data: handlerPyContents });
 if (config.framework === "fasthtml") {
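
Per the hunk above, vendored packages are built into `<workPath>/.vercel/cache/python/py<version>/<entryDirectory>/_vendor`, the cached files are globbed back into the function's file map (the diff passes resolveVendorDir() as the prefix argument to glob), and the Lambda environment gets PYTHONPATH set to that prefix. A small sketch of the path arithmetic involved (all values illustrative):

    import os

    work_path = "/path/to/project"   # illustrative
    entry_directory = "api"          # directory containing the entrypoint
    python_version = "3.12"          # illustrative
    vendor_dir = os.environ.get("VERCEL_PYTHON_VENDOR_DIR", "_vendor")

    # Where pip --target writes packages during the build (cacheable between builds)
    vendor_base_dir = os.path.join(work_path, ".vercel", "cache", "python",
                                   f"py{python_version}", entry_directory)
    cached_vendor_abs = os.path.join(vendor_base_dir, vendor_dir)

    # In the produced function those files live under the vendor prefix, and
    # PYTHONPATH=<vendor_dir> lets the runtime import them from there.
    lambda_env = {"PYTHONPATH": vendor_dir}
    print(cached_vendor_abs, lambda_env)
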
@@ -3112,7 +3174,7 @@ var build = async ({
 files,
 handler: `${handlerPyFilename}.vc_handler`,
 runtime: pythonVersion.runtime,
-environment:
+environment: lambdaEnv,
 supportsResponseStreaming: true
 });
 return { output };

package/package.json
CHANGED
package/vc_init.py
CHANGED
@@ -1,14 +1,38 @@
 import sys
+import os
+import site
+import importlib
 import base64
 import json
 import inspect
 from importlib import util
 from http.server import BaseHTTPRequestHandler
 import socket
-
+
+_here = os.path.dirname(__file__)
+_vendor_rel = '__VC_HANDLER_VENDOR_DIR'
+_vendor = os.path.normpath(os.path.join(_here, _vendor_rel))
+
+if os.path.isdir(_vendor):
+    # Process .pth files like a real site-packages dir
+    site.addsitedir(_vendor)
+
+    # Move _vendor to the front (after script dir if present)
+    try:
+        while _vendor in sys.path:
+            sys.path.remove(_vendor)
+    except ValueError:
+        pass
+
+    # Put vendored deps ahead of site-packages but after the script dir
+    idx = 1 if (sys.path and sys.path[0] in ('', _here)) else 0
+    sys.path.insert(idx, _vendor)
+
+    importlib.invalidate_caches()

 # Import relative path https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly
-
+user_mod_path = os.path.join(_here, "__VC_HANDLER_ENTRYPOINT")  # absolute
+__vc_spec = util.spec_from_file_location("__VC_HANDLER_MODULE_NAME", user_mod_path)
 __vc_module = util.module_from_spec(__vc_spec)
 sys.modules["__VC_HANDLER_MODULE_NAME"] = __vc_module
 __vc_spec.loader.exec_module(__vc_module)