@vercel/build-utils 2.12.3-canary.4 → 2.12.3-canary.40
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/convert-runtime-to-plugin.d.ts +65 -0
- package/dist/convert-runtime-to-plugin.js +363 -0
- package/dist/detect-builders.d.ts +6 -0
- package/dist/detect-builders.js +58 -5
- package/dist/fs/glob.js +2 -1
- package/dist/fs/normalize-path.d.ts +4 -0
- package/dist/fs/normalize-path.js +11 -0
- package/dist/get-ignore-filter.d.ts +1 -0
- package/dist/get-ignore-filter.js +59 -0
- package/dist/index.d.ts +10 -1
- package/dist/index.js +2852 -1581
- package/dist/lambda.d.ts +7 -3
- package/dist/lambda.js +14 -4
- package/dist/prerender.d.ts +3 -1
- package/dist/prerender.js +10 -1
- package/dist/types.d.ts +2 -0
- package/package.json +6 -6
package/dist/convert-runtime-to-plugin.d.ts
ADDED
@@ -0,0 +1,65 @@
+import { Lambda } from './lambda';
+import type { BuildOptions } from './types';
+/**
+ * Convert legacy Runtime to a Plugin.
+ * @param buildRuntime - a legacy build() function from a Runtime
+ * @param packageName - the name of the package, for example `vercel-plugin-python`
+ * @param ext - the file extension, for example `.py`
+ */
+export declare function convertRuntimeToPlugin(buildRuntime: (options: BuildOptions) => Promise<{
+    output: Lambda;
+}>, packageName: string, ext: string): ({ workPath }: {
+    workPath: string;
+}) => Promise<void>;
+/**
+ * If `.output/functions-manifest.json` exists, append to the pages
+ * property. Otherwise write a new file.
+ */
+export declare function updateFunctionsManifest({ workPath, pages, }: {
+    workPath: string;
+    pages: {
+        [key: string]: any;
+    };
+}): Promise<void>;
+/**
+ * Append routes to the `routes-manifest.json` file.
+ * If the file does not exist, it will be created.
+ */
+export declare function updateRoutesManifest({ workPath, redirects, rewrites, headers, dynamicRoutes, staticRoutes, }: {
+    workPath: string;
+    redirects?: {
+        source: string;
+        destination: string;
+        statusCode: number;
+        regex: string;
+    }[];
+    rewrites?: {
+        source: string;
+        destination: string;
+        regex: string;
+    }[];
+    headers?: {
+        source: string;
+        headers: {
+            key: string;
+            value: string;
+        }[];
+        regex: string;
+    }[];
+    dynamicRoutes?: {
+        page: string;
+        regex: string;
+        namedRegex?: string;
+        routeKeys?: {
+            [named: string]: string;
+        };
+    }[];
+    staticRoutes?: {
+        page: string;
+        regex: string;
+        namedRegex?: string;
+        routeKeys?: {
+            [named: string]: string;
+        };
+    }[];
+}): Promise<void>;
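The declaration above is easiest to read next to a usage sketch. The following is a hypothetical plugin entrypoint, not code shipped in this package: it assumes `convertRuntimeToPlugin` is re-exported from the package root, and that the wrapped legacy Runtime (here `@vercel/python`) exposes a `build()` export compatible with the `(options: BuildOptions) => Promise<{ output: Lambda }>` parameter shape.

// Hypothetical `vercel-plugin-python/index.ts` (illustrative only).
import { convertRuntimeToPlugin } from '@vercel/build-utils';
// Assumption: the legacy Runtime's `build()` matches the expected shape above.
import { build as legacyBuild } from '@vercel/python';

// The returned function has the plugin signature `({ workPath }) => Promise<void>`
// and can be exported as the plugin's build step.
export const build = convertRuntimeToPlugin(
  legacyBuild,
  'vercel-plugin-python',
  '.py'
);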
package/dist/convert-runtime-to-plugin.js
ADDED
@@ -0,0 +1,363 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.updateRoutesManifest = exports.updateFunctionsManifest = exports.convertRuntimeToPlugin = void 0;
+const fs_extra_1 = __importDefault(require("fs-extra"));
+const path_1 = require("path");
+const glob_1 = __importDefault(require("./fs/glob"));
+const normalize_path_1 = require("./fs/normalize-path");
+const lambda_1 = require("./lambda");
+const _1 = require(".");
+// `.output` was already created by the Build Command, so we have
+// to ensure its contents don't get bundled into the Lambda. Similarily,
+// we don't want to bundle anything from `.vercel` either. Lastly,
+// Builders/Runtimes didn't have `vercel.json` or `now.json`.
+const ignoredPaths = ['.output', '.vercel', 'vercel.json', 'now.json'];
+const shouldIgnorePath = (file, ignoreFilter, ignoreFile) => {
+    const isNative = ignoredPaths.some(item => {
+        return file.startsWith(item);
+    });
+    if (!ignoreFile) {
+        return isNative;
+    }
+    return isNative || ignoreFilter(file);
+};
+const getSourceFiles = async (workPath, ignoreFilter) => {
+    const list = await glob_1.default('**', {
+        cwd: workPath,
+    });
+    // We're not passing this as an `ignore` filter to the `glob` function above,
+    // so that we can re-use exactly the same `getIgnoreFilter` method that the
+    // Build Step uses (literally the same code). Note that this exclusion only applies
+    // when deploying. Locally, another exclusion is needed, which is handled
+    // further below in the `convertRuntimeToPlugin` function.
+    for (const file in list) {
+        if (shouldIgnorePath(file, ignoreFilter, true)) {
+            delete list[file];
+        }
+    }
+    return list;
+};
+/**
+ * Convert legacy Runtime to a Plugin.
+ * @param buildRuntime - a legacy build() function from a Runtime
+ * @param packageName - the name of the package, for example `vercel-plugin-python`
+ * @param ext - the file extension, for example `.py`
+ */
+function convertRuntimeToPlugin(buildRuntime, packageName, ext) {
+    // This `build()` signature should match `plugin.build()` signature in `vercel build`.
+    return async function build({ workPath }) {
+        // We also don't want to provide any files to Runtimes that were ignored
+        // through `.vercelignore` or `.nowignore`, because the Build Step does the same.
+        const ignoreFilter = await _1.getIgnoreFilter(workPath);
+        // Retrieve the files that are currently available on the File System,
+        // before the Legacy Runtime has even started to build.
+        const sourceFilesPreBuild = await getSourceFiles(workPath, ignoreFilter);
+        // Instead of doing another `glob` to get all the matching source files,
+        // we'll filter the list of existing files down to only the ones
+        // that are matching the entrypoint pattern, so we're first creating
+        // a clean new list to begin.
+        const entrypoints = Object.assign({}, sourceFilesPreBuild);
+        const entrypointMatch = new RegExp(`^api/.*${ext}$`);
+        // Up next, we'll strip out the files from the list of entrypoints
+        // that aren't actually considered entrypoints.
+        for (const file in entrypoints) {
+            if (!entrypointMatch.test(file)) {
+                delete entrypoints[file];
+            }
+        }
+        const pages = {};
+        const pluginName = packageName.replace('vercel-plugin-', '');
+        const outputPath = path_1.join(workPath, '.output');
+        const traceDir = path_1.join(outputPath, `inputs`,
+        // Legacy Runtimes can only provide API Routes, so that's
+        // why we can use this prefix for all of them. Here, we have to
+        // make sure to not use a cryptic hash name, because people
+        // need to be able to easily inspect the output.
+        `api-routes-${pluginName}`);
+        await fs_extra_1.default.ensureDir(traceDir);
+        let newPathsRuntime = new Set();
+        const entryRoot = path_1.join(outputPath, 'server', 'pages');
+        for (const entrypoint of Object.keys(entrypoints)) {
+            const { output } = await buildRuntime({
+                files: sourceFilesPreBuild,
+                entrypoint,
+                workPath,
+                config: {
+                    zeroConfig: true,
+                },
+                meta: {
+                    avoidTopLevelInstall: true,
+                    skipDownload: true,
+                },
+            });
+            // Legacy Runtimes tend to pollute the `workPath` with compiled results,
+            // because the `workPath` used to be a place that was a place where they could
+            // just put anything, but nowadays it's the working directory of the `vercel build`
+            // command, which is the place where the developer keeps their source files,
+            // so we don't want to pollute this space unnecessarily. That means we have to clean
+            // up files that were created by the build, which is done further below.
+            const sourceFilesAfterBuild = await getSourceFiles(workPath, ignoreFilter);
+            // @ts-ignore This symbol is a private API
+            const lambdaFiles = output[lambda_1.FILES_SYMBOL];
+            // When deploying, the `files` that are passed to the Legacy Runtimes already
+            // have certain files that are ignored stripped, but locally, that list of
+            // files isn't used by the Legacy Runtimes, so we need to apply the filters
+            // to the outputs that they are returning instead.
+            for (const file in lambdaFiles) {
+                if (shouldIgnorePath(file, ignoreFilter, false)) {
+                    delete lambdaFiles[file];
+                }
+            }
+            let handlerFileBase = output.handler;
+            let handlerFile = lambdaFiles[handlerFileBase];
+            let handlerHasImport = false;
+            const { handler } = output;
+            const handlerMethod = handler.split('.').pop();
+            const handlerFileName = handler.replace(`.${handlerMethod}`, '');
+            // For compiled languages, the launcher file for the Lambda generated
+            // by the Legacy Runtime matches the `handler` defined for it, but for
+            // interpreted languages, the `handler` consists of the launcher file name
+            // without an extension, plus the name of the method inside of that file
+            // that should be invoked, so we have to construct the file path explicitly.
+            if (!handlerFile) {
+                handlerFileBase = handlerFileName + ext;
+                handlerFile = lambdaFiles[handlerFileBase];
+                handlerHasImport = true;
+            }
+            if (!handlerFile || !handlerFile.fsPath) {
+                throw new Error(`Could not find a handler file. Please ensure that \`files\` for the returned \`Lambda\` contains an \`FileFsRef\` named "${handlerFileBase}" with a valid \`fsPath\`.`);
+            }
+            const handlerExtName = path_1.extname(handlerFile.fsPath);
+            const entryBase = path_1.basename(entrypoint).replace(ext, handlerExtName);
+            const entryPath = path_1.join(path_1.dirname(entrypoint), entryBase);
+            const entry = path_1.join(entryRoot, entryPath);
+            // We never want to link here, only copy, because the launcher
+            // file often has the same name for every entrypoint, which means that
+            // every build for every entrypoint overwrites the launcher of the previous
+            // one, so linking would end with a broken reference.
+            await fs_extra_1.default.ensureDir(path_1.dirname(entry));
+            await fs_extra_1.default.copy(handlerFile.fsPath, entry);
+            // For compiled languages, the launcher file will be binary and therefore
+            // won't try to import a user-provided request handler (instead, it will
+            // contain it). But for interpreted languages, the launcher might try to
+            // load a user-provided request handler from the source file instead of bundling
+            // it, so we have to adjust the import statement inside the launcher to point
+            // to the respective source file. Previously, Legacy Runtimes simply expected
+            // the user-provided request-handler to be copied right next to the launcher,
+            // but with the new File System API, files won't be moved around unnecessarily.
+            if (handlerHasImport) {
+                const { fsPath } = handlerFile;
+                const encoding = 'utf-8';
+                // This is the true directory of the user-provided request handler in the
+                // source files, so that's what we will use as an import path in the launcher.
+                const locationPrefix = path_1.relative(entry, outputPath);
+                let handlerContent = await fs_extra_1.default.readFile(fsPath, encoding);
+                const importPaths = [
+                    // This is the full entrypoint path, like `./api/test.py`
+                    `./${entrypoint}`,
+                    // This is the entrypoint path without extension, like `api/test`
+                    entrypoint.slice(0, -ext.length),
+                ];
+                // Generate a list of regular expressions that we can use for
+                // finding matches, but only allow matches if the import path is
+                // wrapped inside single (') or double quotes (").
+                const patterns = importPaths.map(path => {
+                    // eslint-disable-next-line no-useless-escape
+                    return new RegExp(`('|")(${path.replace(/\./g, '\\.')})('|")`, 'g');
+                });
+                let replacedMatch = null;
+                for (const pattern of patterns) {
+                    const newContent = handlerContent.replace(pattern, (_, p1, p2, p3) => {
+                        return `${p1}${path_1.join(locationPrefix, p2)}${p3}`;
+                    });
+                    if (newContent !== handlerContent) {
+                        _1.debug(`Replaced "${pattern}" inside "${entry}" to ensure correct import of user-provided request handler`);
+                        handlerContent = newContent;
+                        replacedMatch = true;
+                    }
+                }
+                if (!replacedMatch) {
+                    new Error(`No replacable matches for "${importPaths[0]}" or "${importPaths[1]}" found in "${fsPath}"`);
+                }
+                await fs_extra_1.default.writeFile(entry, handlerContent, encoding);
+            }
+            else {
+                await fs_extra_1.default.copy(handlerFile.fsPath, entry);
+            }
+            const newFilesEntrypoint = [];
+            const newDirectoriesEntrypoint = [];
+            const preBuildFiles = Object.values(sourceFilesPreBuild).map(file => {
+                return file.fsPath;
+            });
+            // Generate a list of directories and files that weren't present
+            // before the entrypoint was processed by the Legacy Runtime, so
+            // that we can perform a cleanup later. We need to divide into files
+            // and directories because only cleaning up files might leave empty
+            // directories, and listing directories separately also speeds up the
+            // build because we can just delete them, which wipes all of their nested
+            // paths, instead of iterating through all files that should be deleted.
+            for (const file in sourceFilesAfterBuild) {
+                if (!sourceFilesPreBuild[file]) {
+                    const path = sourceFilesAfterBuild[file].fsPath;
+                    const dirPath = path_1.dirname(path);
+                    // If none of the files that were present before the entrypoint
+                    // was processed are contained within the directory we're looking
+                    // at right now, then we know it's a newly added directory
+                    // and it can therefore be removed later on.
+                    const isNewDir = !preBuildFiles.some(filePath => {
+                        return path_1.dirname(filePath).startsWith(dirPath);
+                    });
+                    // Check out the list of tracked directories that were
+                    // newly added and see if one of them contains the path
+                    // we're looking at.
+                    const hasParentDir = newDirectoriesEntrypoint.some(dir => {
+                        return path.startsWith(dir);
+                    });
+                    // If we have already tracked a directory that was newly
+                    // added that sits above the file or directory that we're
+                    // looking at, we don't need to add more entries to the list
+                    // because when the parent will get removed in the future,
+                    // all of its children (and therefore the path we're looking at)
+                    // will automatically get removed anyways.
+                    if (hasParentDir) {
+                        continue;
+                    }
+                    if (isNewDir) {
+                        newDirectoriesEntrypoint.push(dirPath);
+                    }
+                    else {
+                        newFilesEntrypoint.push(path);
+                    }
+                }
+            }
+            const nft = `${entry}.nft.json`;
+            const json = JSON.stringify({
+                version: 2,
+                files: Object.keys(lambdaFiles)
+                    .map(file => {
+                    const { fsPath } = lambdaFiles[file];
+                    if (!fsPath) {
+                        throw new Error(`File "${file}" is missing valid \`fsPath\` property`);
+                    }
+                    // The handler was already moved into position above.
+                    if (file === handlerFileBase) {
+                        return;
+                    }
+                    return normalize_path_1.normalizePath(path_1.relative(path_1.dirname(nft), fsPath));
+                })
+                    .filter(Boolean),
+            });
+            await fs_extra_1.default.ensureDir(path_1.dirname(nft));
+            await fs_extra_1.default.writeFile(nft, json);
+            // Extend the list of directories and files that were created by the
+            // Legacy Runtime with the list of directories and files that were
+            // created for the entrypoint that was just processed above.
+            newPathsRuntime = new Set([
+                ...newPathsRuntime,
+                ...newFilesEntrypoint,
+                ...newDirectoriesEntrypoint,
+            ]);
+            // Add an entry that will later on be added to the `functions-manifest.json`
+            // file that is placed inside of the `.output` directory.
+            pages[normalize_path_1.normalizePath(entryPath)] = {
+                // Because the underlying file used as a handler was placed
+                // inside `.output/server/pages/api`, it no longer has the name it originally
+                // had and is now named after the API Route that it's responsible for,
+                // so we have to adjust the name of the Lambda handler accordingly.
+                handler: handler.replace(handlerFileName, path_1.parse(entry).name),
+                runtime: output.runtime,
+                memory: output.memory,
+                maxDuration: output.maxDuration,
+                environment: output.environment,
+                allowQuery: output.allowQuery,
+            };
+        }
+        // A list of all the files that were created by the Legacy Runtime,
+        // which we'd like to remove from the File System.
+        const toRemove = Array.from(newPathsRuntime).map(path => {
+            _1.debug(`Removing ${path} as part of cleanup`);
+            return fs_extra_1.default.remove(path);
+        });
+        // Once all the entrypoints have been processed, we'd like to
+        // remove all the files from `workPath` that originally weren't present
+        // before the Legacy Runtime began running, because the `workPath`
+        // is nowadays the directory in which the user keeps their source code, since
+        // we're no longer running separate parallel builds for every Legacy Runtime.
+        await Promise.all(toRemove);
+        // Add any Serverless Functions that were exposed by the Legacy Runtime
+        // to the `functions-manifest.json` file provided in `.output`.
+        await updateFunctionsManifest({ workPath, pages });
+    };
+}
+exports.convertRuntimeToPlugin = convertRuntimeToPlugin;
+async function readJson(filePath) {
+    try {
+        const str = await fs_extra_1.default.readFile(filePath, 'utf8');
+        return JSON.parse(str);
+    }
+    catch (err) {
+        if (err.code === 'ENOENT') {
+            return {};
+        }
+        throw err;
+    }
+}
+/**
+ * If `.output/functions-manifest.json` exists, append to the pages
+ * property. Otherwise write a new file.
+ */
+async function updateFunctionsManifest({ workPath, pages, }) {
+    const functionsManifestPath = path_1.join(workPath, '.output', 'functions-manifest.json');
+    const functionsManifest = await readJson(functionsManifestPath);
+    if (!functionsManifest.version)
+        functionsManifest.version = 2;
+    if (!functionsManifest.pages)
+        functionsManifest.pages = {};
+    for (const [pageKey, pageConfig] of Object.entries(pages)) {
+        functionsManifest.pages[pageKey] = { ...pageConfig };
+    }
+    await fs_extra_1.default.writeFile(functionsManifestPath, JSON.stringify(functionsManifest));
+}
+exports.updateFunctionsManifest = updateFunctionsManifest;
+/**
+ * Append routes to the `routes-manifest.json` file.
+ * If the file does not exist, it will be created.
+ */
+async function updateRoutesManifest({ workPath, redirects, rewrites, headers, dynamicRoutes, staticRoutes, }) {
+    const routesManifestPath = path_1.join(workPath, '.output', 'routes-manifest.json');
+    const routesManifest = await readJson(routesManifestPath);
+    if (!routesManifest.version)
+        routesManifest.version = 3;
+    if (routesManifest.pages404 === undefined)
+        routesManifest.pages404 = true;
+    if (redirects) {
+        if (!routesManifest.redirects)
+            routesManifest.redirects = [];
+        routesManifest.redirects.push(...redirects);
+    }
+    if (rewrites) {
+        if (!routesManifest.rewrites)
+            routesManifest.rewrites = [];
+        routesManifest.rewrites.push(...rewrites);
+    }
+    if (headers) {
+        if (!routesManifest.headers)
+            routesManifest.headers = [];
+        routesManifest.headers.push(...headers);
+    }
+    if (dynamicRoutes) {
+        if (!routesManifest.dynamicRoutes)
+            routesManifest.dynamicRoutes = [];
+        routesManifest.dynamicRoutes.push(...dynamicRoutes);
+    }
+    if (staticRoutes) {
+        if (!routesManifest.staticRoutes)
+            routesManifest.staticRoutes = [];
+        routesManifest.staticRoutes.push(...staticRoutes);
+    }
+    await fs_extra_1.default.writeFile(routesManifestPath, JSON.stringify(routesManifest));
+}
+exports.updateRoutesManifest = updateRoutesManifest;
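To make the manifest handling above concrete, here is a hedged sketch of calling the exported `updateFunctionsManifest` helper directly. The page key and all field values are placeholders; the entry shape mirrors the `pages[...]` object built per entrypoint in the loop above, and the import path assumes the helper is re-exported from the package root.

import { updateFunctionsManifest } from '@vercel/build-utils';

async function writeExampleManifest(workPath: string) {
  // Appends to `.output/functions-manifest.json`, creating it if missing.
  await updateFunctionsManifest({
    workPath,
    pages: {
      'api/date.py': {
        handler: 'date.handler', // placeholder handler name
        runtime: 'python3.9',    // placeholder runtime identifier
        memory: 1024,
        maxDuration: 10,
        environment: {},
      },
    },
  });
}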
package/dist/detect-builders.d.ts
CHANGED
@@ -34,5 +34,11 @@ export declare function detectBuilders(files: string[], pkg?: PackageJson | unde
 redirectRoutes: Route[] | null;
 rewriteRoutes: Route[] | null;
 errorRoutes: Route[] | null;
+limitedRoutes: LimitedRoutes | null;
 }>;
+interface LimitedRoutes {
+defaultRoutes: Route[];
+redirectRoutes: Route[];
+rewriteRoutes: Route[];
+}
 export {};
package/dist/detect-builders.js
CHANGED
@@ -66,6 +66,7 @@ async function detectBuilders(files, pkg, options = {}) {
 redirectRoutes: null,
 rewriteRoutes: null,
 errorRoutes: null,
+limitedRoutes: null,
 };
 }
 const sortedFiles = files.sort(sortFiles);
@@ -113,6 +114,7 @@ async function detectBuilders(files, pkg, options = {}) {
 redirectRoutes: null,
 rewriteRoutes: null,
 errorRoutes: null,
+limitedRoutes: null,
 };
 }
 if (apiRoute) {
@@ -167,6 +169,7 @@ async function detectBuilders(files, pkg, options = {}) {
 defaultRoutes: null,
 rewriteRoutes: null,
 errorRoutes: null,
+limitedRoutes: null,
 };
 }
 // If `outputDirectory` is an empty string,
@@ -203,6 +206,7 @@ async function detectBuilders(files, pkg, options = {}) {
 defaultRoutes: null,
 rewriteRoutes: null,
 errorRoutes: null,
+limitedRoutes: null,
 };
 }
 const builders = [];
@@ -221,7 +225,7 @@ async function detectBuilders(files, pkg, options = {}) {
 });
 }
 }
-const routesResult = getRouteResult(apiRoutes, dynamicRoutes, usedOutputDirectory, apiBuilders, frontendBuilder, options);
+const routesResult = getRouteResult(pkg, apiRoutes, dynamicRoutes, usedOutputDirectory, apiBuilders, frontendBuilder, options);
 return {
 warnings,
 builders: builders.length ? builders : null,
@@ -230,6 +234,7 @@ async function detectBuilders(files, pkg, options = {}) {
 defaultRoutes: routesResult.defaultRoutes,
 rewriteRoutes: routesResult.rewriteRoutes,
 errorRoutes: routesResult.errorRoutes,
+limitedRoutes: routesResult.limitedRoutes,
 };
 }
 exports.detectBuilders = detectBuilders;
@@ -670,23 +675,51 @@ function createRouteFromPath(filePath, featHandleMiss, cleanUrls) {
 }
 return { route, isDynamic };
 }
-function getRouteResult(apiRoutes, dynamicRoutes, outputDirectory, apiBuilders, frontendBuilder, options) {
+function getRouteResult(pkg, apiRoutes, dynamicRoutes, outputDirectory, apiBuilders, frontendBuilder, options) {
 var _a, _b;
+const deps = Object.assign({}, pkg === null || pkg === void 0 ? void 0 : pkg.dependencies, pkg === null || pkg === void 0 ? void 0 : pkg.devDependencies);
 const defaultRoutes = [];
 const redirectRoutes = [];
 const rewriteRoutes = [];
 const errorRoutes = [];
+const limitedRoutes = {
+defaultRoutes: [],
+redirectRoutes: [],
+rewriteRoutes: [],
+};
 const framework = ((_a = frontendBuilder === null || frontendBuilder === void 0 ? void 0 : frontendBuilder.config) === null || _a === void 0 ? void 0 : _a.framework) || '';
 const isNextjs = framework === 'nextjs' || _1.isOfficialRuntime('next', frontendBuilder === null || frontendBuilder === void 0 ? void 0 : frontendBuilder.use);
 const ignoreRuntimes = (_b = slugToFramework.get(framework)) === null || _b === void 0 ? void 0 : _b.ignoreRuntimes;
 if (apiRoutes && apiRoutes.length > 0) {
 if (options.featHandleMiss) {
+// Exclude extension names if the corresponding plugin is not found in package.json
+// detectBuilders({ignoreRoutesForBuilders: ['@vercel/python']})
+// return a copy of routes.
+// We should exclud errorRoutes and
 const extSet = detectApiExtensions(apiBuilders);
+const withTag = options.tag ? `@${options.tag}` : '';
+const extSetLimited = detectApiExtensions(apiBuilders.filter(b => {
+if (b.use === `@vercel/python${withTag}` &&
+!('vercel-plugin-python' in deps)) {
+return false;
+}
+if (b.use === `@vercel/go${withTag}` &&
+!('vercel-plugin-go' in deps)) {
+return false;
+}
+if (b.use === `@vercel/ruby${withTag}` &&
+!('vercel-plugin-ruby' in deps)) {
+return false;
+}
+return true;
+}));
 if (extSet.size > 0) {
-const
+const extGroup = `(?:\\.(?:${Array.from(extSet)
 .map(ext => ext.slice(1))
-.join('|')
-const
+.join('|')}))`;
+const extGroupLimited = `(?:\\.(?:${Array.from(extSetLimited)
+.map(ext => ext.slice(1))
+.join('|')}))`;
 if (options.cleanUrls) {
 redirectRoutes.push({
 src: `^/(api(?:.+)?)/index${extGroup}?/?$`,
@@ -700,6 +733,18 @@ function getRouteResult(apiRoutes, dynamicRoutes, outputDirectory, apiBuilders,
 },
 status: 308,
 });
+limitedRoutes.redirectRoutes.push({
+src: `^/(api(?:.+)?)/index${extGroupLimited}?/?$`,
+headers: { Location: options.trailingSlash ? '/$1/' : '/$1' },
+status: 308,
+});
+limitedRoutes.redirectRoutes.push({
+src: `^/api/(.+)${extGroupLimited}/?$`,
+headers: {
+Location: options.trailingSlash ? '/api/$1/' : '/api/$1',
+},
+status: 308,
+});
 }
 else {
 defaultRoutes.push({ handle: 'miss' });
@@ -708,9 +753,16 @@ function getRouteResult(apiRoutes, dynamicRoutes, outputDirectory, apiBuilders,
 dest: '/api/$1',
 check: true,
 });
+limitedRoutes.defaultRoutes.push({ handle: 'miss' });
+limitedRoutes.defaultRoutes.push({
+src: `^/api/(.+)${extGroupLimited}$`,
+dest: '/api/$1',
+check: true,
+});
 }
 }
 rewriteRoutes.push(...dynamicRoutes);
+limitedRoutes.rewriteRoutes.push(...dynamicRoutes);
 if (typeof ignoreRuntimes === 'undefined') {
 // This route is only necessary to hide the directory listing
 // to avoid enumerating serverless function names.
@@ -755,6 +807,7 @@ function getRouteResult(apiRoutes, dynamicRoutes, outputDirectory, apiBuilders,
 redirectRoutes,
 rewriteRoutes,
 errorRoutes,
+limitedRoutes,
 };
 }
 function sortFilesBySegmentCount(fileA, fileB) {
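The net effect of the `limitedRoutes` additions is easiest to see from the caller's side. The sketch below is illustrative only: the file names and `pkg` contents are made up, and `detectBuilders` is assumed to be exported from the package root with the options used here.

import { detectBuilders } from '@vercel/build-utils';

async function inspectRoutes() {
  const files = ['package.json', 'api/date.py', 'api/user.go'];
  // Only the Python plugin is declared, so `.go` should fall outside limitedRoutes.
  const pkg = { dependencies: { 'vercel-plugin-python': 'latest' } };

  const { defaultRoutes, limitedRoutes } = await detectBuilders(files, pkg, {
    featHandleMiss: true,
  });

  // `defaultRoutes` still covers every detected API extension, while
  // `limitedRoutes` only matches extensions whose companion plugin is installed.
  console.log(defaultRoutes, limitedRoutes?.defaultRoutes);
}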
package/dist/fs/glob.js
CHANGED
@@ -8,6 +8,7 @@ const assert_1 = __importDefault(require("assert"));
 const glob_1 = __importDefault(require("glob"));
 const util_1 = require("util");
 const fs_extra_1 = require("fs-extra");
+const normalize_path_1 = require("./normalize-path");
 const file_fs_ref_1 = __importDefault(require("../file-fs-ref"));
 const vanillaGlob = util_1.promisify(glob_1.default);
 async function glob(pattern, opts, mountpoint) {
@@ -31,7 +32,7 @@ async function glob(pattern, opts, mountpoint) {
 options.dot = true;
 const files = await vanillaGlob(pattern, options);
 for (const relativePath of files) {
-const fsPath = path_1.default.join(options.cwd, relativePath)
+const fsPath = normalize_path_1.normalizePath(path_1.default.join(options.cwd, relativePath));
 let stat = options.statCache[fsPath];
 assert_1.default(stat, `statCache does not contain value for ${relativePath} (resolved to ${fsPath})`);
 const isSymlink = options.symlinks[fsPath];
package/dist/fs/normalize-path.js
ADDED
@@ -0,0 +1,11 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.normalizePath = void 0;
+const isWin = process.platform === 'win32';
+/**
+ * Convert Windows separators to Unix separators.
+ */
+function normalizePath(p) {
+    return isWin ? p.replace(/\\/g, '/') : p;
+}
+exports.normalizePath = normalizePath;
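For context on the `glob.js` change above, a minimal illustration of what the new helper does. The deep import path is an assumption for the sake of the example; inside the package the module is required as `./fs/normalize-path`.

// Assumed deep import path (illustrative only).
import { normalizePath } from '@vercel/build-utils/dist/fs/normalize-path';

// On Windows, backslash separators are rewritten to forward slashes so that
// `statCache` keys in `glob()` and `.nft.json` entries stay consistent:
//   'api\\date.py'  ->  'api/date.py'
// On other platforms the input is returned unchanged.
console.log(normalizePath('api\\date.py'));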
package/dist/get-ignore-filter.d.ts
ADDED
@@ -0,0 +1 @@
+export default function (downloadPath: string, rootDirectory?: string | undefined): Promise<(p: string) => any>;