@vercel/python 5.0.1 → 5.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +158 -53
- package/package.json +1 -1
- package/vc_init.py +26 -2
package/dist/index.js
CHANGED
@@ -2651,7 +2651,7 @@ ${stderr}${stdout}`;
 var require_lib = __commonJS({
 "../../node_modules/.pnpm/which@3.0.0/node_modules/which/lib/index.js"(exports, module2) {
 var isexe = require_isexe();
-var { join:
+var { join: join3, delimiter, sep, posix } = require("path");
 var isWindows = process.platform === "win32";
 var rSlash = new RegExp(`[${posix.sep}${sep === posix.sep ? "" : sep}]`.replace(/(\\)/g, "\\$1"));
 var rRel = new RegExp(`^\\.${rSlash.source}`);
@@ -2680,7 +2680,7 @@ var require_lib = __commonJS({
 var getPathPart = (raw, cmd) => {
 const pathPart = /^".*"$/.test(raw) ? raw.slice(1, -1) : raw;
 const prefix = !pathPart && rRel.test(cmd) ? cmd.slice(0, 2) : "";
-return prefix +
+return prefix + join3(pathPart, cmd);
 };
 var which2 = async (cmd, opt = {}) => {
 const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt);
@@ -2749,11 +2749,12 @@ module.exports = __toCommonJS(src_exports);
 var import_fs = __toESM(require("fs"));
 var import_execa2 = __toESM(require_execa());
 var import_util = require("util");
-var
+var import_path2 = require("path");
 var import_build_utils3 = require("@vercel/build-utils");
 
 // src/install.ts
 var import_execa = __toESM(require_execa());
+var import_path = require("path");
 var import_build_utils = require("@vercel/build-utils");
 var makeDependencyCheckCode = (dependency) => `
 from importlib import util
@@ -2768,7 +2769,8 @@ async function isInstalled(pythonPath, dependency, cwd) {
 ["-c", makeDependencyCheckCode(dependency)],
 {
 stdio: "pipe",
-cwd
+cwd,
+env: { ...process.env, PYTHONPATH: (0, import_path.join)(cwd, resolveVendorDir()) }
 }
 );
 return stdout.startsWith(cwd);
@@ -2790,7 +2792,8 @@ async function areRequirementsInstalled(pythonPath, requirementsPath, cwd) {
 ["-c", makeRequirementsCheckCode(requirementsPath)],
 {
 stdio: "pipe",
-cwd
+cwd,
+env: { ...process.env, PYTHONPATH: (0, import_path.join)(cwd, resolveVendorDir()) }
 }
 );
 return true;
@@ -2798,13 +2801,18 @@ async function areRequirementsInstalled(pythonPath, requirementsPath, cwd) {
 return false;
 }
 }
+function resolveVendorDir() {
+const vendorDir = process.env.VERCEL_PYTHON_VENDOR_DIR || "_vendor";
+return vendorDir;
+}
 async function pipInstall(pipPath, workPath, args) {
-const target =
+const target = resolveVendorDir();
 process.env.PIP_USER = "0";
 const cmdArgs = [
 "install",
 "--disable-pip-version-check",
 "--no-compile",
+"--no-cache-dir",
 "--target",
 target,
 ...args
@@ -2959,14 +2967,32 @@ function isInstalled2({ pipPath, pythonPath }) {
 // src/index.ts
 var readFile = (0, import_util.promisify)(import_fs.default.readFile);
 var writeFile = (0, import_util.promisify)(import_fs.default.writeFile);
-
+var fastapiEntrypointFilenames = ["app", "index", "server", "main"];
+var fastapiEntrypointDirs = ["", "src", "app"];
+var fastapiContentRegex = /(from\s+fastapi\s+import\s+FastAPI|import\s+fastapi|FastAPI\s*\()/;
+var fastapiCandidateEntrypoints = fastapiEntrypointFilenames.flatMap(
+(filename) => fastapiEntrypointDirs.map((dir) => import_path2.posix.join(dir, `${filename}.py`))
+);
+function isFastapiEntrypoint(file) {
+try {
+const fsPath = file.fsPath;
+if (!fsPath)
+return false;
+const contents = import_fs.default.readFileSync(fsPath, "utf8");
+return fastapiContentRegex.test(contents);
+} catch {
+return false;
+}
+}
+async function pipenvConvert(cmd, srcDir, env) {
 (0, import_build_utils3.debug)("Running pipfile2req...");
 try {
 const out = await import_execa2.default.stdout(cmd, [], {
-cwd: srcDir
+cwd: srcDir,
+env
 });
 (0, import_build_utils3.debug)("Contents of requirements.txt is: " + out);
-import_fs.default.writeFileSync((0,
+import_fs.default.writeFileSync((0, import_path2.join)(srcDir, "requirements.txt"), out);
 } catch (err) {
 console.log('Failed to run "pipfile2req"');
 throw err;
@@ -2982,8 +3008,8 @@ async function downloadFilesInWorkPath({
 (0, import_build_utils3.debug)("Downloading user files...");
 let downloadedFiles = await (0, import_build_utils3.download)(files, workPath, meta);
 if (meta.isDev) {
-const { devCacheDir = (0,
-const destCache = (0,
+const { devCacheDir = (0, import_path2.join)(workPath, ".now", "cache") } = meta;
+const destCache = (0, import_path2.join)(devCacheDir, (0, import_path2.basename)(entrypoint, ".py"));
 await (0, import_build_utils3.download)(downloadedFiles, destCache);
 downloadedFiles = await (0, import_build_utils3.glob)("**", destCache);
 workPath = destCache;
@@ -3006,57 +3032,120 @@ var build = async ({
 });
 try {
 if (meta.isDev) {
-const setupCfg = (0,
+const setupCfg = (0, import_path2.join)(workPath, "setup.cfg");
 await writeFile(setupCfg, "[install]\nprefix=\n");
 }
 } catch (err) {
 console.log('Failed to create "setup.cfg" file');
 throw err;
 }
+let fsFiles = await (0, import_build_utils3.glob)("**", workPath);
+if (!fsFiles[entrypoint]) {
+let discovered;
+if (config?.framework === "fastapi") {
+const entrypointCandidates = fastapiCandidateEntrypoints.filter(
+(c) => !!fsFiles[c]
+);
+if (entrypointCandidates.length) {
+const fastapiEntrypoint = entrypointCandidates.find(
+(c) => isFastapiEntrypoint(fsFiles[c])
+);
+discovered = fastapiEntrypoint || entrypointCandidates[0];
+}
+}
+if (discovered) {
+(0, import_build_utils3.debug)(
+`Resolved Python entrypoint to "${discovered}" (configured "${entrypoint}" not found).`
+);
+entrypoint = discovered;
+} else if (config?.framework === "fastapi") {
+const searchedList = fastapiCandidateEntrypoints.join(", ");
+throw new import_build_utils3.NowBuildError({
+code: "FASTAPI_ENTRYPOINT_NOT_FOUND",
+message: `No FastAPI entrypoint found. Searched for: ${searchedList}`
+});
+}
+}
+const entryDirectory = (0, import_path2.dirname)(entrypoint);
+const hasReqLocal = !!fsFiles[(0, import_path2.join)(entryDirectory, "requirements.txt")];
+const hasReqGlobal = !!fsFiles["requirements.txt"];
+const pipfileLockDir = fsFiles[(0, import_path2.join)(entryDirectory, "Pipfile.lock")] ? (0, import_path2.join)(workPath, entryDirectory) : fsFiles["Pipfile.lock"] ? workPath : null;
+const pipfileDir = fsFiles[(0, import_path2.join)(entryDirectory, "Pipfile")] ? (0, import_path2.join)(workPath, entryDirectory) : fsFiles["Pipfile"] ? workPath : null;
+if (!hasReqLocal && !hasReqGlobal && (pipfileLockDir || pipfileDir)) {
+if (pipfileLockDir) {
+(0, import_build_utils3.debug)('Found "Pipfile.lock"');
+} else {
+(0, import_build_utils3.debug)('Found "Pipfile"');
+}
+if (pipfileLockDir) {
+let lock = {};
+try {
+const json = await readFile(
+(0, import_path2.join)(pipfileLockDir, "Pipfile.lock"),
+"utf8"
+);
+lock = JSON.parse(json);
+} catch (err) {
+throw new import_build_utils3.NowBuildError({
+code: "INVALID_PIPFILE_LOCK",
+message: "Unable to parse Pipfile.lock"
+});
+}
+pythonVersion = getSupportedPythonVersion({
+isDev: meta.isDev,
+pipLockPythonVersion: lock?._meta?.requires?.python_version
+});
+}
+if (!hasReqLocal && !hasReqGlobal) {
+const tempDir = await (0, import_build_utils3.getWriteableDirectory)();
+await installRequirement({
+pythonPath: pythonVersion.pythonPath,
+pipPath: pythonVersion.pipPath,
+dependency: "pipfile-requirements",
+version: "0.3.0",
+workPath: tempDir,
+meta,
+args: ["--no-warn-script-location"]
+});
+const tempVendorDir = (0, import_path2.join)(tempDir, resolveVendorDir());
+const envForConvert = { ...process.env, PYTHONPATH: tempVendorDir };
+const convertCmd = process.platform === "win32" ? (0, import_path2.join)(tempVendorDir, "Scripts", "pipfile2req.exe") : (0, import_path2.join)(tempVendorDir, "bin", "pipfile2req");
+await pipenvConvert(
+convertCmd,
+pipfileLockDir || pipfileDir,
+envForConvert
+);
+} else {
+(0, import_build_utils3.debug)(
+'Skipping Pipfile.lock conversion because "requirements.txt" exists'
+);
+}
+}
+fsFiles = await (0, import_build_utils3.glob)("**", workPath);
+const requirementsTxt = (0, import_path2.join)(entryDirectory, "requirements.txt");
+const vendorBaseDir = (0, import_path2.join)(
+workPath,
+".vercel",
+"cache",
+"python",
+`py${pythonVersion.version}`,
+entryDirectory
+);
+try {
+await import_fs.default.promises.mkdir(vendorBaseDir, { recursive: true });
+} catch (err) {
+console.log("Failed to create vendor cache directory");
+throw err;
+}
 console.log("Installing required dependencies...");
 await installRequirement({
 pythonPath: pythonVersion.pythonPath,
 pipPath: pythonVersion.pipPath,
 dependency: "werkzeug",
 version: "1.0.1",
-workPath,
+workPath: vendorBaseDir,
 meta
 });
-let fsFiles = await (0, import_build_utils3.glob)("**", workPath);
-const entryDirectory = (0, import_path.dirname)(entrypoint);
-const pipfileLockDir = fsFiles[(0, import_path.join)(entryDirectory, "Pipfile.lock")] ? (0, import_path.join)(workPath, entryDirectory) : fsFiles["Pipfile.lock"] ? workPath : null;
-if (pipfileLockDir) {
-(0, import_build_utils3.debug)('Found "Pipfile.lock"');
-let lock = {};
-try {
-const json = await readFile((0, import_path.join)(pipfileLockDir, "Pipfile.lock"), "utf8");
-lock = JSON.parse(json);
-} catch (err) {
-throw new import_build_utils3.NowBuildError({
-code: "INVALID_PIPFILE_LOCK",
-message: "Unable to parse Pipfile.lock"
-});
-}
-pythonVersion = getSupportedPythonVersion({
-isDev: meta.isDev,
-pipLockPythonVersion: lock?._meta?.requires?.python_version
-});
-const tempDir = await (0, import_build_utils3.getWriteableDirectory)();
-await installRequirement({
-pythonPath: pythonVersion.pythonPath,
-pipPath: pythonVersion.pipPath,
-dependency: "pipfile-requirements",
-version: "0.3.0",
-workPath: tempDir,
-meta,
-args: ["--no-warn-script-location"]
-});
-process.env.PYTHONPATH = tempDir;
-const convertCmd = (0, import_path.join)(tempDir, "bin", "pipfile2req");
-await pipenvConvert(convertCmd, pipfileLockDir);
-}
-fsFiles = await (0, import_build_utils3.glob)("**", workPath);
-const requirementsTxt = (0, import_path.join)(entryDirectory, "requirements.txt");
 if (fsFiles[requirementsTxt]) {
 (0, import_build_utils3.debug)('Found local "requirements.txt"');
 const requirementsTxtPath = fsFiles[requirementsTxt].fsPath;
@@ -3064,7 +3153,7 @@ var build = async ({
 pythonPath: pythonVersion.pythonPath,
 pipPath: pythonVersion.pipPath,
 filePath: requirementsTxtPath,
-workPath,
+workPath: vendorBaseDir,
 meta
 });
 } else if (fsFiles["requirements.txt"]) {
@@ -3074,20 +3163,22 @@ var build = async ({
 pythonPath: pythonVersion.pythonPath,
 pipPath: pythonVersion.pipPath,
 filePath: requirementsTxtPath,
-workPath,
+workPath: vendorBaseDir,
 meta
 });
 }
-const originalPyPath = (0,
+const originalPyPath = (0, import_path2.join)(__dirname, "..", "vc_init.py");
 const originalHandlerPyContents = await readFile(originalPyPath, "utf8");
 (0, import_build_utils3.debug)("Entrypoint is", entrypoint);
 const moduleName = entrypoint.replace(/\//g, ".").replace(/\.py$/, "");
+const vendorDir = resolveVendorDir();
 const suffix = meta.isDev && !entrypoint.endsWith(".py") ? ".py" : "";
 const entrypointWithSuffix = `${entrypoint}${suffix}`;
 (0, import_build_utils3.debug)("Entrypoint with suffix is", entrypointWithSuffix);
-const handlerPyContents = originalHandlerPyContents.replace(/__VC_HANDLER_MODULE_NAME/g, moduleName).replace(/__VC_HANDLER_ENTRYPOINT/g, entrypointWithSuffix);
+const handlerPyContents = originalHandlerPyContents.replace(/__VC_HANDLER_MODULE_NAME/g, moduleName).replace(/__VC_HANDLER_ENTRYPOINT/g, entrypointWithSuffix).replace(/__VC_HANDLER_VENDOR_DIR/g, vendorDir);
 const predefinedExcludes = [
 ".git/**",
+".gitignore",
 ".vercel/**",
 ".pnpm-store/**",
 "**/node_modules/**",
@@ -3097,11 +3188,25 @@ var build = async ({
 "**/venv/**",
 "**/__pycache__/**"
 ];
+const lambdaEnv = {};
+lambdaEnv.PYTHONPATH = vendorDir;
 const globOptions = {
 cwd: workPath,
 ignore: config && typeof config.excludeFiles === "string" ? [...predefinedExcludes, config.excludeFiles] : predefinedExcludes
 };
 const files = await (0, import_build_utils3.glob)("**", globOptions);
+try {
+const cachedVendorAbs = (0, import_path2.join)(vendorBaseDir, resolveVendorDir());
+if (import_fs.default.existsSync(cachedVendorAbs)) {
+const vendorFiles = await (0, import_build_utils3.glob)("**", cachedVendorAbs, resolveVendorDir());
+for (const [p, f] of Object.entries(vendorFiles)) {
+files[p] = f;
+}
+}
+} catch (err) {
+console.log("Failed to include cached vendor directory");
+throw err;
+}
 const handlerPyFilename = "vc__handler__python";
 files[`${handlerPyFilename}.py`] = new import_build_utils3.FileBlob({ data: handlerPyContents });
 if (config.framework === "fasthtml") {
@@ -3112,7 +3217,7 @@ var build = async ({
 files,
 handler: `${handlerPyFilename}.vc_handler`,
 runtime: pythonVersion.runtime,
-environment:
+environment: lambdaEnv,
 supportsResponseStreaming: true
 });
 return { output };
package/package.json
CHANGED
package/vc_init.py
CHANGED
@@ -1,14 +1,38 @@
 import sys
+import os
+import site
+import importlib
 import base64
 import json
 import inspect
 from importlib import util
 from http.server import BaseHTTPRequestHandler
 import socket
-
+
+_here = os.path.dirname(__file__)
+_vendor_rel = '__VC_HANDLER_VENDOR_DIR'
+_vendor = os.path.normpath(os.path.join(_here, _vendor_rel))
+
+if os.path.isdir(_vendor):
+    # Process .pth files like a real site-packages dir
+    site.addsitedir(_vendor)
+
+    # Move _vendor to the front (after script dir if present)
+    try:
+        while _vendor in sys.path:
+            sys.path.remove(_vendor)
+    except ValueError:
+        pass
+
+    # Put vendored deps ahead of site-packages but after the script dir
+    idx = 1 if (sys.path and sys.path[0] in ('', _here)) else 0
+    sys.path.insert(idx, _vendor)
+
+    importlib.invalidate_caches()
 
 # Import relative path https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly
-
+user_mod_path = os.path.join(_here, "__VC_HANDLER_ENTRYPOINT")  # absolute
+__vc_spec = util.spec_from_file_location("__VC_HANDLER_MODULE_NAME", user_mod_path)
 __vc_module = util.module_from_spec(__vc_spec)
 sys.modules["__VC_HANDLER_MODULE_NAME"] = __vc_module
 __vc_spec.loader.exec_module(__vc_module)